Home | History | Annotate | Download | only in videosink
      1 /*
      2  * Copyright (C) 2011 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 
     18 package android.filterpacks.videosink;
     19 
     20 import android.content.Context;
     21 import android.filterfw.core.Filter;
     22 import android.filterfw.core.FilterContext;
     23 import android.filterfw.core.Frame;
     24 import android.filterfw.core.FrameFormat;
     25 import android.filterfw.core.FrameManager;
     26 import android.filterfw.core.GenerateFieldPort;
     27 import android.filterfw.core.GenerateFinalPort;
     28 import android.filterfw.core.GLFrame;
     29 import android.filterfw.core.KeyValueMap;
     30 import android.filterfw.core.MutableFrameFormat;
     31 import android.filterfw.core.NativeFrame;
     32 import android.filterfw.core.Program;
     33 import android.filterfw.core.ShaderProgram;
     34 import android.filterfw.format.ImageFormat;
     35 import android.filterfw.geometry.Point;
     36 import android.filterfw.geometry.Quad;
     37 import android.os.ConditionVariable;
     38 import android.media.MediaRecorder;
     39 import android.media.CamcorderProfile;
     40 import android.filterfw.core.GLEnvironment;
     41 
     42 import java.io.IOException;
     43 import java.io.FileDescriptor;
     44 import java.util.List;
     45 import java.util.Set;
     46 
     47 import android.util.Log;
     48 
     49 /** @hide */
     50 public class MediaEncoderFilter extends Filter {
     51 
     52     /** User-visible parameters */
     53 
     54     /** Recording state. When set to false, recording will stop, or will not
     55      * start if not yet running the graph. Instead, frames are simply ignored.
     56      * When switched back to true, recording will restart. This allows a single
     57      * graph to both provide preview and to record video. If this is false,
     58      * recording settings can be updated while the graph is running.
     59      */
     60     @GenerateFieldPort(name = "recording", hasDefault = true)
     61     private boolean mRecording = true;
     62 
     63     /** Filename to save the output. */
     64     @GenerateFieldPort(name = "outputFile", hasDefault = true)
     65     private String mOutputFile = new String("/sdcard/MediaEncoderOut.mp4");
     66 
     67     /** File Descriptor to save the output. */
     68     @GenerateFieldPort(name = "outputFileDescriptor", hasDefault = true)
     69     private FileDescriptor mFd = null;
     70 
     71     /** Input audio source. If not set, no audio will be recorded.
     72      * Select from the values in MediaRecorder.AudioSource
     73      */
     74     @GenerateFieldPort(name = "audioSource", hasDefault = true)
     75     private int mAudioSource = NO_AUDIO_SOURCE;
     76 
     77     /** Media recorder info listener, which needs to implement
     78      * MediaRecorder.OnInfoListener. Set this to receive notifications about
     79      * recording events.
     80      */
     81     @GenerateFieldPort(name = "infoListener", hasDefault = true)
     82     private MediaRecorder.OnInfoListener mInfoListener = null;
     83 
     84     /** Media recorder error listener, which needs to implement
     85      * MediaRecorder.OnErrorListener. Set this to receive notifications about
     86      * recording errors.
     87      */
     88     @GenerateFieldPort(name = "errorListener", hasDefault = true)
     89     private MediaRecorder.OnErrorListener mErrorListener = null;
     90 
     91     /** Media recording done callback, which needs to implement OnRecordingDoneListener.
     92      * Set this to finalize media upon completion of media recording.
     93      */
     94     @GenerateFieldPort(name = "recordingDoneListener", hasDefault = true)
     95     private OnRecordingDoneListener mRecordingDoneListener = null;
     96 
     97     /** Orientation hint. Used for indicating proper video playback orientation.
     98      * Units are in degrees of clockwise rotation, valid values are (0, 90, 180,
     99      * 270).
    100      */
    101     @GenerateFieldPort(name = "orientationHint", hasDefault = true)
    102     private int mOrientationHint = 0;
    103 
    104     /** Camcorder profile to use. Select from the profiles available in
    105      * android.media.CamcorderProfile. If this field is set, it overrides
    106      * settings to width, height, framerate, outputFormat, and videoEncoder.
    107      */
    108     @GenerateFieldPort(name = "recordingProfile", hasDefault = true)
    109     private CamcorderProfile mProfile = null;
    110 
    111     /** Frame width to be encoded, defaults to 320.
    112      * Actual received frame size has to match this */
    113     @GenerateFieldPort(name = "width", hasDefault = true)
    114     private int mWidth = 0;
    115 
    116     /** Frame height to to be encoded, defaults to 240.
    117      * Actual received frame size has to match */
    118     @GenerateFieldPort(name = "height", hasDefault = true)
    119     private int mHeight = 0;
    120 
    121     /** Stream framerate to encode the frames at.
    122      * By default, frames are encoded at 30 FPS*/
    123     @GenerateFieldPort(name = "framerate", hasDefault = true)
    124     private int mFps = 30;
    125 
    126     /** The output format to encode the frames in.
    127      * Choose an output format from the options in
    128      * android.media.MediaRecorder.OutputFormat */
    129     @GenerateFieldPort(name = "outputFormat", hasDefault = true)
    130     private int mOutputFormat = MediaRecorder.OutputFormat.MPEG_4;
    131 
    132     /** The videoencoder to encode the frames with.
    133      * Choose a videoencoder from the options in
    134      * android.media.MediaRecorder.VideoEncoder */
    135     @GenerateFieldPort(name = "videoEncoder", hasDefault = true)
    136     private int mVideoEncoder = MediaRecorder.VideoEncoder.H264;
    137 
    138     /** The input region to read from the frame. The corners of this quad are
    139      * mapped to the output rectangle. The input frame ranges from (0,0)-(1,1),
    140      * top-left to bottom-right. The corners of the quad are specified in the
    141      * order bottom-left, bottom-right, top-left, top-right.
    142      */
    143     @GenerateFieldPort(name = "inputRegion", hasDefault = true)
    144     private Quad mSourceRegion;
    145 
    146     /** The maximum filesize (in bytes) of the recording session.
    147      * By default, it will be 0 and will be passed on to the MediaRecorder.
    148      * If the limit is zero or negative, MediaRecorder will disable the limit*/
    149     @GenerateFieldPort(name = "maxFileSize", hasDefault = true)
    150     private long mMaxFileSize = 0;
    151 
    152     /** The maximum duration (in milliseconds) of the recording session.
    153      * By default, it will be 0 and will be passed on to the MediaRecorder.
    154      * If the limit is zero or negative, MediaRecorder will record indefinitely*/
    155     @GenerateFieldPort(name = "maxDurationMs", hasDefault = true)
    156     private int mMaxDurationMs = 0;
    157 
    158     /** TimeLapse Interval between frames.
    159      * By default, it will be 0. Whether the recording is timelapsed
    160      * is inferred based on its value being greater than 0 */
    161     @GenerateFieldPort(name = "timelapseRecordingIntervalUs", hasDefault = true)
    162     private long mTimeBetweenTimeLapseFrameCaptureUs = 0;
    163 
    164     // End of user visible parameters
    165 
    166     private static final int NO_AUDIO_SOURCE = -1;
    167 
    168     private int mSurfaceId;
    169     private ShaderProgram mProgram;
    170     private GLFrame mScreen;
    171 
    172     private boolean mRecordingActive = false;
    173     private long mTimestampNs = 0;
    174     private long mLastTimeLapseFrameRealTimestampNs = 0;
    175     private int mNumFramesEncoded = 0;
    176     // Used to indicate whether recording is timelapsed.
    177     // Inferred based on (mTimeBetweenTimeLapseFrameCaptureUs > 0)
    178     private boolean mCaptureTimeLapse = false;
    179 
    180     private boolean mLogVerbose;
    181     private static final String TAG = "MediaEncoderFilter";
    182 
    183     // Our hook to the encoder
    184     private MediaRecorder mMediaRecorder;
    185 
    186     /** Callback to be called when media recording completes. */
    187 
    188     public interface OnRecordingDoneListener {
    189         public void onRecordingDone();
    190     }
    191 
    /**
     * Creates the encoder filter. The default input region is the full unit
     * quad (the entire input frame), with corners listed in the order
     * bottom-left, bottom-right, top-left, top-right. Also caches whether
     * verbose logging is enabled for this tag.
     */
    public MediaEncoderFilter(String name) {
        super(name);
        mSourceRegion = new Quad(new Point(0, 0),   // bottom-left
                                 new Point(1, 0),   // bottom-right
                                 new Point(0, 1),   // top-left
                                 new Point(1, 1));  // top-right
        mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    }
    201 
    202     @Override
    203     public void setupPorts() {
    204         // Add input port- will accept RGBA GLFrames
    205         addMaskedInputPort("videoframe", ImageFormat.create(ImageFormat.COLORSPACE_RGBA,
    206                                                       FrameFormat.TARGET_GPU));
    207     }
    208 
    209     @Override
    210     public void fieldPortValueUpdated(String name, FilterContext context) {
    211         if (mLogVerbose) Log.v(TAG, "Port " + name + " has been updated");
    212         if (name.equals("recording")) return;
    213         if (name.equals("inputRegion")) {
    214             if (isOpen()) updateSourceRegion();
    215             return;
    216         }
    217         // TODO: Not sure if it is possible to update the maxFileSize
    218         // when the recording is going on. For now, not doing that.
    219         if (isOpen() && mRecordingActive) {
    220             throw new RuntimeException("Cannot change recording parameters"
    221                                        + " when the filter is recording!");
    222         }
    223     }
    224 
    225     private void updateSourceRegion() {
    226         // Flip source quad to map to OpenGL origin
    227         Quad flippedRegion = new Quad();
    228         flippedRegion.p0 = mSourceRegion.p2;
    229         flippedRegion.p1 = mSourceRegion.p3;
    230         flippedRegion.p2 = mSourceRegion.p0;
    231         flippedRegion.p3 = mSourceRegion.p1;
    232         mProgram.setSourceRegion(flippedRegion);
    233     }
    234 
    // update the MediaRecorderParams based on the variables.
    // These have to be in certain order as per the MediaRecorder
    // documentation (video source first, then profile/format, then
    // listeners and output, then size/duration limits).
    private void updateMediaRecorderParams() {
        // Time lapse mode is inferred from a positive capture interval.
        mCaptureTimeLapse = mTimeBetweenTimeLapseFrameCaptureUs > 0;
        // NOTE(review): 2 presumably mirrors the hidden
        // MediaRecorder.VideoSource.GRALLOC_BUFFER constant — confirm it
        // stays in sync with the platform definition.
        final int GRALLOC_BUFFER = 2;
        mMediaRecorder.setVideoSource(GRALLOC_BUFFER);
        // Audio is only recorded for non-timelapse sessions with an explicit source.
        if (!mCaptureTimeLapse && (mAudioSource != NO_AUDIO_SOURCE)) {
            mMediaRecorder.setAudioSource(mAudioSource);
        }
        if (mProfile != null) {
            // Profile sets format/encoders/size/rate in one call; keep mFps
            // in sync since it drives timelapse timestamp spacing.
            mMediaRecorder.setProfile(mProfile);
            mFps = mProfile.videoFrameRate;
            // If width and height are set larger than 0, then those
            // overwrite the ones in the profile.
            if (mWidth > 0 && mHeight > 0) {
                mMediaRecorder.setVideoSize(mWidth, mHeight);
            }
        } else {
            // No profile: configure format, encoder, size and rate individually.
            mMediaRecorder.setOutputFormat(mOutputFormat);
            mMediaRecorder.setVideoEncoder(mVideoEncoder);
            mMediaRecorder.setVideoSize(mWidth, mHeight);
            mMediaRecorder.setVideoFrameRate(mFps);
        }
        mMediaRecorder.setOrientationHint(mOrientationHint);
        mMediaRecorder.setOnInfoListener(mInfoListener);
        mMediaRecorder.setOnErrorListener(mErrorListener);
        // File descriptor takes precedence over the output file path.
        if (mFd != null) {
            mMediaRecorder.setOutputFile(mFd);
        } else {
            mMediaRecorder.setOutputFile(mOutputFile);
        }
        try {
            mMediaRecorder.setMaxFileSize(mMaxFileSize);
        } catch (Exception e) {
            // Following the logic in  VideoCamera.java (in Camera app)
            // We are going to ignore failure of setMaxFileSize here, as
            // a) The composer selected may simply not support it, or
            // b) The underlying media framework may not handle 64-bit range
            // on the size restriction.
            Log.w(TAG, "Setting maxFileSize on MediaRecorder unsuccessful! "
                    + e.getMessage());
        }
        mMediaRecorder.setMaxDuration(mMaxDurationMs);
    }
    280 
    281     @Override
    282     public void prepare(FilterContext context) {
    283         if (mLogVerbose) Log.v(TAG, "Preparing");
    284 
    285         mProgram = ShaderProgram.createIdentity(context);
    286 
    287         mRecordingActive = false;
    288     }
    289 
    290     @Override
    291     public void open(FilterContext context) {
    292         if (mLogVerbose) Log.v(TAG, "Opening");
    293         updateSourceRegion();
    294         if (mRecording) startRecording(context);
    295     }
    296 
    // Begins a recording session: allocates the screen GLFrame render target,
    // configures and starts the MediaRecorder, and registers its surface with
    // the GL environment. The ordering prepare() -> start() ->
    // registerSurfaceFromMediaRecorder() is required (see comments below).
    private void startRecording(FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Starting recording");

        // Create a frame representing the screen
        MutableFrameFormat screenFormat = new MutableFrameFormat(
                              FrameFormat.TYPE_BYTE, FrameFormat.TARGET_GPU);
        screenFormat.setBytesPerSample(4);

        int width, height;
        boolean widthHeightSpecified = mWidth > 0 && mHeight > 0;
        // If width and height are specified, then use those instead
        // of that in the profile.
        if (mProfile != null && !widthHeightSpecified) {
            width = mProfile.videoFrameWidth;
            height = mProfile.videoFrameHeight;
        } else {
            width = mWidth;
            height = mHeight;
        }
        screenFormat.setDimensions(width, height);
        // Bind to the FBO currently in place rather than allocating a new one.
        mScreen = (GLFrame)context.getFrameManager().newBoundFrame(
                           screenFormat, GLFrame.EXISTING_FBO_BINDING, 0);

        // Initialize the media recorder

        mMediaRecorder = new MediaRecorder();
        updateMediaRecorderParams();

        try {
            mMediaRecorder.prepare();
        } catch (IllegalStateException e) {
            // Rethrow as-is: the generic catch below would otherwise wrap it.
            throw e;
        } catch (IOException e) {
            throw new RuntimeException("IOException in"
                    + "MediaRecorder.prepare()!", e);
        } catch (Exception e) {
            throw new RuntimeException("Unknown Exception in"
                    + "MediaRecorder.prepare()!", e);
        }
        // Make sure start() is called before trying to
        // register the surface. The native window handle needed to create
        // the surface is initiated in start()
        mMediaRecorder.start();
        if (mLogVerbose) Log.v(TAG, "Open: registering surface from Mediarecorder");
        mSurfaceId = context.getGLEnvironment().
                registerSurfaceFromMediaRecorder(mMediaRecorder);
        mNumFramesEncoded = 0;
        mRecordingActive = true;
    }
    346 
    347     public boolean skipFrameAndModifyTimestamp(long timestampNs) {
    348         // first frame- encode. Don't skip
    349         if (mNumFramesEncoded == 0) {
    350             mLastTimeLapseFrameRealTimestampNs = timestampNs;
    351             mTimestampNs = timestampNs;
    352             if (mLogVerbose) Log.v(TAG, "timelapse: FIRST frame, last real t= "
    353                     + mLastTimeLapseFrameRealTimestampNs +
    354                     ", setting t = " + mTimestampNs );
    355             return false;
    356         }
    357 
    358         // Workaround to bypass the first 2 input frames for skipping.
    359         // The first 2 output frames from the encoder are: decoder specific info and
    360         // the compressed video frame data for the first input video frame.
    361         if (mNumFramesEncoded >= 2 && timestampNs <
    362             (mLastTimeLapseFrameRealTimestampNs +  1000L * mTimeBetweenTimeLapseFrameCaptureUs)) {
    363             // If 2 frames have been already encoded,
    364             // Skip all frames from last encoded frame until
    365             // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
    366             if (mLogVerbose) Log.v(TAG, "timelapse: skipping intermediate frame");
    367             return true;
    368         } else {
    369             // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
    370             // - Reset mLastTimeLapseFrameRealTimestampNs to current time.
    371             // - Artificially modify timestampNs to be one frame time (1/framerate) ahead
    372             // of the last encoded frame's time stamp.
    373             if (mLogVerbose) Log.v(TAG, "timelapse: encoding frame, Timestamp t = " + timestampNs +
    374                     ", last real t= " + mLastTimeLapseFrameRealTimestampNs +
    375                     ", interval = " + mTimeBetweenTimeLapseFrameCaptureUs);
    376             mLastTimeLapseFrameRealTimestampNs = timestampNs;
    377             mTimestampNs = mTimestampNs + (1000000000L / (long)mFps);
    378             if (mLogVerbose) Log.v(TAG, "timelapse: encoding frame, setting t = "
    379                     + mTimestampNs + ", delta t = " + (1000000000L / (long)mFps) +
    380                     ", fps = " + mFps );
    381             return false;
    382         }
    383     }
    384 
    // Per-frame entry point: starts/stops recording based on the "recording"
    // port, applies time lapse frame skipping, then renders the input frame
    // onto the recorder surface and swaps buffers to submit it for encoding.
    @Override
    public void process(FilterContext context) {
        GLEnvironment glEnv = context.getGLEnvironment();
        // Get input frame
        Frame input = pullInput("videoframe");

        // Check if recording needs to start
        if (!mRecordingActive && mRecording) {
            startRecording(context);
        }
        // Check if recording needs to stop
        if (mRecordingActive && !mRecording) {
            stopRecording(context);
        }

        // Not recording: the frame has been pulled, just drop it.
        if (!mRecordingActive) return;

        if (mCaptureTimeLapse) {
            // Time lapse: drop frames that arrive too soon; kept frames get a
            // synthetic timestamp written into mTimestampNs.
            if (skipFrameAndModifyTimestamp(input.getTimestamp())) {
                return;
            }
        } else {
            mTimestampNs = input.getTimestamp();
        }

        // Activate our surface
        glEnv.activateSurfaceWithId(mSurfaceId);

        // Process
        mProgram.process(input, mScreen);

        // Set timestamp from input
        glEnv.setSurfaceTimestamp(mTimestampNs);
        // And swap buffers
        glEnv.swapBuffers();
        mNumFramesEncoded++;
    }
    422 
    423     private void stopRecording(FilterContext context) {
    424         if (mLogVerbose) Log.v(TAG, "Stopping recording");
    425 
    426         mRecordingActive = false;
    427         mNumFramesEncoded = 0;
    428         GLEnvironment glEnv = context.getGLEnvironment();
    429         // The following call will switch the surface_id to 0
    430         // (thus, calling eglMakeCurrent on surface with id 0) and
    431         // then call eglDestroy on the surface. Hence, this will
    432         // call disconnect the SurfaceMediaSource, which is needed to
    433         // be called before calling Stop on the mediarecorder
    434         if (mLogVerbose) Log.v(TAG, String.format("Unregistering surface %d", mSurfaceId));
    435         glEnv.unregisterSurfaceId(mSurfaceId);
    436         try {
    437             mMediaRecorder.stop();
    438         } catch (RuntimeException e) {
    439             throw new MediaRecorderStopException("MediaRecorder.stop() failed!", e);
    440         }
    441         mMediaRecorder.release();
    442         mMediaRecorder = null;
    443 
    444         mScreen.release();
    445         mScreen = null;
    446 
    447         // Use an EffectsRecorder callback to forward a media finalization
    448         // call so that it creates the video thumbnail, and whatever else needs
    449         // to be done to finalize media.
    450         if (mRecordingDoneListener != null) {
    451             mRecordingDoneListener.onRecordingDone();
    452         }
    453     }
    454 
    455     @Override
    456     public void close(FilterContext context) {
    457         if (mLogVerbose) Log.v(TAG, "Closing");
    458         if (mRecordingActive) stopRecording(context);
    459     }
    460 
    461     @Override
    462     public void tearDown(FilterContext context) {
    463         // Release all the resources associated with the MediaRecorder
    464         // and GLFrame members
    465         if (mMediaRecorder != null) {
    466             mMediaRecorder.release();
    467         }
    468         if (mScreen != null) {
    469             mScreen.release();
    470         }
    471 
    472     }
    473 
    474 }
    475