      1 /*
      2  * Copyright (C) 2011 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License"); you may not
      5  * use this file except in compliance with the License. You may obtain a copy of
      6  * the License at
      7  *
      8  * http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
     12  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
     13  * License for the specific language governing permissions and limitations under
     14  * the License.
     15  */
     16 
     17 package com.android.camera;
     18 
     19 import android.content.Context;
     20 import android.content.res.AssetFileDescriptor;
     21 import android.filterfw.GraphEnvironment;
     22 import android.filterfw.core.Filter;
     23 import android.filterfw.core.GLEnvironment;
     24 import android.filterfw.core.GraphRunner;
     25 import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
     26 import android.filterfw.geometry.Point;
     27 import android.filterfw.geometry.Quad;
     28 import android.filterpacks.videoproc.BackDropperFilter;
     29 import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
     30 import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
     31 import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
     32 
     33 import android.graphics.SurfaceTexture;
     34 import android.hardware.Camera;
     35 import android.media.MediaRecorder;
     36 import android.media.CamcorderProfile;
     37 import android.os.ConditionVariable;
     38 import android.os.Handler;
     39 import android.os.Looper;
     40 import android.os.ParcelFileDescriptor;
     41 import android.os.SystemProperties;
     42 import android.util.Log;
     43 import android.view.Surface;
     44 import android.view.SurfaceHolder;
     45 
     46 import java.io.File;
     47 import java.io.FileDescriptor;
     48 import java.io.FileNotFoundException;
     49 import java.io.IOException;
     51 
     52 
     53 /**
     54  * Encapsulates the mobile filter framework components needed to record video with
     55  * effects applied. Modeled after MediaRecorder.
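         * <p>
         * A minimal usage sketch based on the public methods of this class; the
         * variables {@code context}, {@code cameraDevice}, {@code surfaceHolder},
         * {@code previewWidth}/{@code previewHeight}, {@code backgroundVideoUrl}
         * (a String, as expected by EFFECT_BACKDROPPER) and {@code outputPath} are
         * assumed to be supplied by the caller. Listener registration and error
         * handling are omitted:
         * <pre>{@code
         *   EffectsRecorder recorder = new EffectsRecorder(context);
         *   recorder.setCamera(cameraDevice);
         *   recorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
         *   recorder.setPreviewDisplay(surfaceHolder, previewWidth, previewHeight);
         *   recorder.setEffect(EffectsRecorder.EFFECT_BACKDROPPER, backgroundVideoUrl);
         *   recorder.startPreview();
         *   recorder.setOutputFile(outputPath);
         *   recorder.startRecording();
         *   // ... later ...
         *   recorder.stopRecording();
         *   recorder.stopPreview();
         *   recorder.release();
         * }</pre>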
     56  */
     57 public class EffectsRecorder {
     58 
     59     public static final int  EFFECT_NONE        = 0;
     60     public static final int  EFFECT_GOOFY_FACE  = 1;
     61     public static final int  EFFECT_BACKDROPPER = 2;
     62 
     63     public static final int  EFFECT_GF_SQUEEZE     = 0;
     64     public static final int  EFFECT_GF_BIG_EYES    = 1;
     65     public static final int  EFFECT_GF_BIG_MOUTH   = 2;
     66     public static final int  EFFECT_GF_SMALL_MOUTH = 3;
     67     public static final int  EFFECT_GF_BIG_NOSE    = 4;
     68     public static final int  EFFECT_GF_SMALL_EYES  = 5;
     69     public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;
     70 
     71     public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
     72     public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
     73     public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
     74     public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
     75     public static final int  EFFECT_MSG_RECORDING_DONE   = 4;
     76 
     77     private Context mContext;
     78     private Handler mHandler;
     80 
     81     private Camera mCameraDevice;
     82     private CamcorderProfile mProfile;
     83     private double mCaptureRate = 0;
     84     private SurfaceHolder mPreviewSurfaceHolder;
     85     private int mPreviewWidth;
     86     private int mPreviewHeight;
     87     private MediaRecorder.OnInfoListener mInfoListener;
     88     private MediaRecorder.OnErrorListener mErrorListener;
     89 
     90     private String mOutputFile;
     91     private FileDescriptor mFd;
     92     private int mOrientationHint = 0;
     93     private long mMaxFileSize = 0;
     94     private int mMaxDurationMs = 0;
     95     private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
     96 
     97     private int mEffect = EFFECT_NONE;
     98     private int mCurrentEffect = EFFECT_NONE;
     99     private EffectsListener mEffectsListener;
    100 
    101     private Object mEffectParameter;
    102 
    103     private GraphEnvironment mGraphEnv;
    104     private int mGraphId;
    105     private GraphRunner mRunner = null;
    106     private GraphRunner mOldRunner = null;
    107 
    108     private SurfaceTexture mTextureSource;
    109 
    110     private static final String mVideoRecordSound = "/system/media/audio/ui/VideoRecord.ogg";
    111     private SoundPlayer mRecordSound;
    112 
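            // Recorder state machine, as driven by the methods below:
            //   STATE_CONFIGURE -> startPreview() -> STATE_STARTING_PREVIEW, or
            //     STATE_WAITING_FOR_SURFACE until setPreviewDisplay() supplies a surface
            //   STATE_STARTING_PREVIEW -> (SurfaceTexture ready callback) -> STATE_PREVIEW
            //   STATE_PREVIEW <-> STATE_RECORD via startRecording()/stopRecording()
            //   stopPreview() returns to STATE_CONFIGURE; release() ends in STATE_RELEASED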
    113     private static final int STATE_CONFIGURE              = 0;
    114     private static final int STATE_WAITING_FOR_SURFACE    = 1;
    115     private static final int STATE_STARTING_PREVIEW       = 2;
    116     private static final int STATE_PREVIEW                = 3;
    117     private static final int STATE_RECORD                 = 4;
    118     private static final int STATE_RELEASED               = 5;
    119     private int mState = STATE_CONFIGURE;
    120 
    121     private static final String TAG = "effectsrecorder";
    122     private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    123 
    124     /** Determines whether a given effect is supported at runtime.
    125      * Some effects require libraries that are not available on all devices.
    126      */
    127     public static boolean isEffectSupported(int effectId) {
    128         switch (effectId) {
    129             case EFFECT_GOOFY_FACE:
    130                 return Filter.isAvailable("com.google.android.filterpacks.facedetect.GoofyRenderFilter");
    131             case EFFECT_BACKDROPPER:
    132                 return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
    133             default:
    134                 return false;
    135         }
    136     }
    137 
    138     public EffectsRecorder(Context context) {
    139         if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
    140         mContext = context;
    141         mHandler = new Handler(Looper.getMainLooper());
    142 
    143         // Construct sound player; use enforced sound output if necessary
    144         File recordSoundFile = new File(mVideoRecordSound);
    145         try {
    146             ParcelFileDescriptor recordSoundParcel =
    147                     ParcelFileDescriptor.open(recordSoundFile,
    148                             ParcelFileDescriptor.MODE_READ_ONLY);
    149             AssetFileDescriptor recordSoundAsset =
    150                     new AssetFileDescriptor(recordSoundParcel, 0,
    151                                             AssetFileDescriptor.UNKNOWN_LENGTH);
    152             if (SystemProperties.get("ro.camera.sound.forced", "0").equals("0")) {
    153                 if (mLogVerbose) Log.v(TAG, "Standard recording sound");
    154                 mRecordSound = new SoundPlayer(recordSoundAsset, false);
    155             } else {
    156                 if (mLogVerbose) Log.v(TAG, "Forced recording sound");
    157                 mRecordSound = new SoundPlayer(recordSoundAsset, true);
    158             }
    159         } catch (FileNotFoundException e) {
    160             Log.e(TAG, "System video record sound not found");
    161             mRecordSound = null;
    162         }
    163 
    164     }
    165 
    166     public void setCamera(Camera cameraDevice) {
    167         switch (mState) {
    168             case STATE_PREVIEW:
    169                 throw new RuntimeException("setCamera cannot be called while previewing!");
    170             case STATE_RECORD:
    171                 throw new RuntimeException("setCamera cannot be called while recording!");
    172             case STATE_RELEASED:
    173                 throw new RuntimeException("setCamera called on an already released recorder!");
    174             default:
    175                 break;
    176         }
    177 
    178         mCameraDevice = cameraDevice;
    179     }
    180 
    181     public void setProfile(CamcorderProfile profile) {
    182         switch (mState) {
    183             case STATE_RECORD:
    184                 throw new RuntimeException("setProfile cannot be called while recording!");
    185             case STATE_RELEASED:
    186                 throw new RuntimeException("setProfile called on an already released recorder!");
    187             default:
    188                 break;
    189         }
    190         mProfile = profile;
    191     }
    192 
    193     public void setOutputFile(String outputFile) {
    194         switch (mState) {
    195             case STATE_RECORD:
    196                 throw new RuntimeException("setOutputFile cannot be called while recording!");
    197             case STATE_RELEASED:
    198                 throw new RuntimeException("setOutputFile called on an already released recorder!");
    199             default:
    200                 break;
    201         }
    202 
    203         mOutputFile = outputFile;
    204         mFd = null;
    205     }
    206 
    207     public void setOutputFile(FileDescriptor fd) {
    208         switch (mState) {
    209             case STATE_RECORD:
    210                 throw new RuntimeException("setOutputFile cannot be called while recording!");
    211             case STATE_RELEASED:
    212                 throw new RuntimeException("setOutputFile called on an already released recorder!");
    213             default:
    214                 break;
    215         }
    216 
    217         mOutputFile = null;
    218         mFd = fd;
    219     }
    220 
    221     /**
    222      * Sets the maximum file size (in bytes) of the recording session.
    223      * This is passed on to the MediaEncoderFilter and ultimately to the
    224      * MediaRecorder. If zero or negative, the MediaRecorder will
    225      * disable the limit.
    226      */
    227     public synchronized void setMaxFileSize(long maxFileSize) {
    228         switch (mState) {
    229             case STATE_RECORD:
    230                 throw new RuntimeException("setMaxFileSize cannot be called while recording!");
    231             case STATE_RELEASED:
    232                 throw new RuntimeException("setMaxFileSize called on an already released recorder!");
    233             default:
    234                 break;
    235         }
    236         mMaxFileSize = maxFileSize;
    237     }
    238 
    239     /**
    240      * Sets the maximum recording duration (in ms) for the next recording session.
    241      * Setting it to zero (the default) disables the limit.
    242      */
    243     public synchronized void setMaxDuration(int maxDurationMs) {
    244         switch (mState) {
    245             case STATE_RECORD:
    246                 throw new RuntimeException("setMaxDuration cannot be called while recording!");
    247             case STATE_RELEASED:
    248                 throw new RuntimeException("setMaxDuration called on an already released recorder!");
    249             default:
    250                 break;
    251         }
    252         mMaxDurationMs = maxDurationMs;
    253     }
    254 
    255 
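            /**
             * Sets the time lapse capture rate, in frames per second. A rate greater
             * than zero enables time lapse capture; zero (the default) records in
             * real time. See startRecording() for how this rate is converted into
             * the recording interval handed to the encoder.
             */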
    256     public void setCaptureRate(double fps) {
    257         switch (mState) {
    258             case STATE_RECORD:
    259                 throw new RuntimeException("setCaptureRate cannot be called while recording!");
    260             case STATE_RELEASED:
    261                 throw new RuntimeException("setCaptureRate called on an already released recorder!");
    262             default:
    263                 break;
    264         }
    265 
    266         if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
    267         mCaptureRate = fps;
    268     }
    269 
    270     public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
    271                                   int previewWidth,
    272                                   int previewHeight) {
    273         if (mLogVerbose) Log.v(TAG, "setPreviewDisplay (" + this + ")");
    274         switch (mState) {
    275             case STATE_RECORD:
    276                 throw new RuntimeException("setPreviewDisplay cannot be called while recording!");
    277             case STATE_RELEASED:
    278                 throw new RuntimeException("setPreviewDisplay called on an already released recorder!");
    279             default:
    280                 break;
    281         }
    282 
    283         mPreviewSurfaceHolder = previewSurfaceHolder;
    284         mPreviewWidth = previewWidth;
    285         mPreviewHeight = previewHeight;
    286 
    287         switch (mState) {
    288             case STATE_WAITING_FOR_SURFACE:
    289                 startPreview();
    290                 break;
    291             case STATE_STARTING_PREVIEW:
    292             case STATE_PREVIEW:
    293                 initializeEffect(true);
    294                 break;
    295         }
    296     }
    297 
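            /**
             * Selects the effect to run and its parameter. For EFFECT_GOOFY_FACE the
             * parameter is an Integer holding one of the EFFECT_GF_* constants; for
             * EFFECT_BACKDROPPER it is a String URL for the background video source
             * (see initializeEffect()).
             */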
    298     public void setEffect(int effect, Object effectParameter) {
    299         if (mLogVerbose) Log.v(TAG,
    300                                "setEffect: effect ID " + effect +
    301                                ", parameter " + effectParameter);
    302         switch (mState) {
    303             case STATE_RECORD:
    304                 throw new RuntimeException("setEffect cannot be called while recording!");
    305             case STATE_RELEASED:
    306                 throw new RuntimeException("setEffect called on an already released recorder!");
    307             default:
    308                 break;
    309         }
    310 
    311         mEffect = effect;
    312         mEffectParameter = effectParameter;
    313 
    314         if (mState == STATE_PREVIEW ||
    315                 mState == STATE_STARTING_PREVIEW) {
    316             initializeEffect(false);
    317         }
    318     }
    319 
    320     public interface EffectsListener {
    321         public void onEffectsUpdate(int effectId, int effectMsg);
    322         public void onEffectsError(Exception exception, String filePath);
    323     }
    324 
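            /**
             * Registers a listener for effect state updates and errors. Updates are
             * delivered as (effectId, EFFECT_MSG_*) pairs on the main thread; see
             * sendMessage() and raiseError(). A minimal sketch of a listener, with
             * purely illustrative handling:
             * <pre>{@code
             *   recorder.setEffectsListener(new EffectsRecorder.EffectsListener() {
             *       public void onEffectsUpdate(int effectId, int effectMsg) {
             *           if (effectMsg == EffectsRecorder.EFFECT_MSG_RECORDING_DONE) {
             *               // e.g. finalize and save the recorded clip
             *           }
             *       }
             *       public void onEffectsError(Exception exception, String filePath) {
             *           // e.g. clean up the partially written filePath and notify the user
             *       }
             *   });
             * }</pre>
             */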
    325     public void setEffectsListener(EffectsListener listener) {
    326         mEffectsListener = listener;
    327     }
    328 
    329     private void setFaceDetectOrientation() {
    330         if (mCurrentEffect == EFFECT_GOOFY_FACE) {
    331             Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
    332             Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
    333             rotateFilter.setInputValue("rotation", mOrientationHint);
    334             int reverseDegrees = (360 - mOrientationHint) % 360;
    335             metaRotateFilter.setInputValue("rotation", reverseDegrees);
    336         }
    337     }
    338 
    339     private void setRecordingOrientation() {
    340         if ( mState != STATE_RECORD && mRunner != null) {
    341             Point bl = new Point(0, 0);
    342             Point br = new Point(1, 0);
    343             Point tl = new Point(0, 1);
    344             Point tr = new Point(1, 1);
    345             Quad recordingRegion;
    346             if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
    347                 // The back camera is not mirrored, so use an identity transform
    348                 recordingRegion = new Quad(bl, br, tl, tr);
    349             } else {
    350                 // Recording region needs to be tweaked for front cameras, since they
    351                 // mirror their preview
    352                 if (mOrientationHint == 0 || mOrientationHint == 180) {
    353                     // Horizontal flip in landscape
    354                     recordingRegion = new Quad(br, bl, tr, tl);
    355                 } else {
    356                     // Horizontal flip in portrait
    357                     recordingRegion = new Quad(tl, tr, bl, br);
    358                 }
    359             }
    360             Filter recorder = mRunner.getGraph().getFilter("recorder");
    361             recorder.setInputValue("inputRegion", recordingRegion);
    362         }
    363     }
    364     public void setOrientationHint(int degrees) {
    365         switch (mState) {
    366             case STATE_RELEASED:
    367                 throw new RuntimeException(
    368                         "setOrientationHint called on an already released recorder!");
    369             default:
    370                 break;
    371         }
    372         if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
    373         mOrientationHint = degrees;
    374         setFaceDetectOrientation();
    375         setRecordingOrientation();
    376     }
    377 
    378     public void setCameraFacing(int facing) {
    379         switch (mState) {
    380             case STATE_RELEASED:
    381                 throw new RuntimeException(
    382                     "setCameraFacing called on an already released recorder!");
    383             default:
    384                 break;
    385         }
    386         mCameraFacing = facing;
    387         setRecordingOrientation();
    388     }
    389 
    390     public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
    391         switch (mState) {
    392             case STATE_RECORD:
    393                 throw new RuntimeException("setInfoListener cannot be called while recording!");
    394             case STATE_RELEASED:
    395                 throw new RuntimeException("setInfoListener called on an already released recorder!");
    396             default:
    397                 break;
    398         }
    399         mInfoListener = infoListener;
    400     }
    401 
    402     public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
    403         switch (mState) {
    404             case STATE_RECORD:
    405                 throw new RuntimeException("setErrorListener cannot be called while recording!");
    406             case STATE_RELEASED:
    407                 throw new RuntimeException("setErrorListener called on an already released recorder!");
    408             default:
    409                 break;
    410         }
    411         mErrorListener = errorListener;
    412     }
    413 
    414     private void initializeFilterFramework() {
    415         mGraphEnv = new GraphEnvironment();
    416         mGraphEnv.createGLEnvironment();
    417 
    418         if (mLogVerbose) {
    419             Log.v(TAG, "Effects framework initializing. Recording size "
    420                   + mProfile.videoFrameWidth + ", " + mProfile.videoFrameHeight);
    421         }
    422 
    423         mGraphEnv.addReferences(
    424                 "textureSourceCallback", mSourceReadyCallback,
    425                 "recordingWidth", mProfile.videoFrameWidth,
    426                 "recordingHeight", mProfile.videoFrameHeight,
    427                 "recordingProfile", mProfile,
    428                 "learningDoneListener", mLearningDoneListener,
    429                 "recordingDoneListener", mRecordingDoneListener);
    430         mRunner = null;
    431         mGraphId = -1;
    432         mCurrentEffect = EFFECT_NONE;
    433     }
    434 
    435     private synchronized void initializeEffect(boolean forceReset) {
    436         if (forceReset ||
    437             mCurrentEffect != mEffect ||
    438             mCurrentEffect == EFFECT_BACKDROPPER) {
    439             if (mLogVerbose) {
    440                 Log.v(TAG, "Effect initializing. Preview size "
    441                        + mPreviewWidth + ", " + mPreviewHeight);
    442             }
    443 
    444             mGraphEnv.addReferences(
    445                     "previewSurface", mPreviewSurfaceHolder.getSurface(),
    446                     "previewWidth", mPreviewWidth,
    447                     "previewHeight", mPreviewHeight,
    448                     "orientation", mOrientationHint);
    449             if (mState == STATE_PREVIEW ||
    450                     mState == STATE_STARTING_PREVIEW) {
    451                 // Switching effects while running. Inform video camera.
    452                 sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
    453             }
    454 
    455             switch (mEffect) {
    456                 case EFFECT_GOOFY_FACE:
    457                     mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
    458                     break;
    459                 case EFFECT_BACKDROPPER:
    460                     sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
    461                     mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
    462                     break;
    463                 default:
    464                     throw new RuntimeException("Unknown effect ID " + mEffect + "!");
    465             }
    466             mCurrentEffect = mEffect;
    467 
    468             mOldRunner = mRunner;
    469             mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
    470             mRunner.setDoneCallback(mRunnerDoneCallback);
    471             if (mLogVerbose) {
    472                 Log.v(TAG, "New runner: " + mRunner
    473                       + ". Old runner: " + mOldRunner);
    474             }
    475             if (mState == STATE_PREVIEW ||
    476                     mState == STATE_STARTING_PREVIEW) {
    477                 // Switching effects while running. Stop existing runner.
    478                 // The stop callback will take care of starting new runner.
    479                 mCameraDevice.stopPreview();
    480                 try {
    481                     mCameraDevice.setPreviewTexture(null);
    482                 } catch(IOException e) {
    483                     throw new RuntimeException("Unable to disconnect camera from effect input", e);
    484                 }
    485                 mOldRunner.stop();
    486             }
    487         }
    488 
    489         switch (mCurrentEffect) {
    490             case EFFECT_GOOFY_FACE:
    491                 tryEnableVideoStabilization(true);
    492                 Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
    493                 goofyFilter.setInputValue("currentEffect",
    494                                           ((Integer)mEffectParameter).intValue());
    495                 break;
    496             case EFFECT_BACKDROPPER:
    497                 tryEnableVideoStabilization(false);
    498                 Filter backgroundSrc = mRunner.getGraph().getFilter("background");
    499                 backgroundSrc.setInputValue("sourceUrl",
    500                                             (String)mEffectParameter);
    501                 break;
    502             default:
    503                 break;
    504         }
    505         setFaceDetectOrientation();
    506         setRecordingOrientation();
    507     }
    508 
    509     public synchronized void startPreview() {
    510         if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
    511 
    512         switch (mState) {
    513             case STATE_STARTING_PREVIEW:
    514             case STATE_PREVIEW:
    515                 // Already running preview
    516                 Log.w(TAG, "startPreview called when already running preview");
    517                 return;
    518             case STATE_RECORD:
    519                 throw new RuntimeException("Cannot start preview when already recording!");
    520             case STATE_RELEASED:
    521                 throw new RuntimeException("startPreview called on an already released recorder!");
    522             default:
    523                 break;
    524         }
    525 
    526         if (mEffect == EFFECT_NONE) {
    527             throw new RuntimeException("No effect selected!");
    528         }
    529         if (mEffectParameter == null) {
    530             throw new RuntimeException("No effect parameter provided!");
    531         }
    532         if (mProfile == null) {
    533             throw new RuntimeException("No recording profile provided!");
    534         }
    535         if (mPreviewSurfaceHolder == null) {
    536             if (mLogVerbose) Log.v(TAG, "Passed a null surface holder; waiting for valid one");
    537             mState = STATE_WAITING_FOR_SURFACE;
    538             return;
    539         }
    540         if (mCameraDevice == null) {
    541             throw new RuntimeException("No camera to record from!");
    542         }
    543 
    544         if (mLogVerbose) Log.v(TAG, "Initializing filter graph");
    545 
    546         initializeFilterFramework();
    547 
    548         initializeEffect(true);
    549 
    550         if (mLogVerbose) Log.v(TAG, "Starting filter graph");
    551 
    552         mState = STATE_STARTING_PREVIEW;
    553         mRunner.run();
    554         // Rest of preview startup handled in mSourceReadyCallback
    555     }
    556 
    557     private SurfaceTextureSourceListener mSourceReadyCallback =
    558             new SurfaceTextureSourceListener() {
    559         public void onSurfaceTextureSourceReady(SurfaceTexture source) {
    560             if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
    561             synchronized(EffectsRecorder.this) {
    562                 mTextureSource = source;
    563 
    564                 if (mState == STATE_CONFIGURE) {
    565                     // Stop preview happened while the runner was doing startup tasks
    566                     // Since we haven't started anything up, don't do anything
    567                     // Rest of cleanup will happen in onRunnerDone
    568                     if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
    569                     return;
    570                 }
    571                 if (mState == STATE_RELEASED) {
    572                     // EffectsRecorder has been released, so don't touch the camera device
    573                     // or anything else
    574                     if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
    575                     return;
    576                 }
    577                 if (source == null) {
    578                     if (mState == STATE_PREVIEW ||
    579                             mState == STATE_STARTING_PREVIEW ||
    580                             mState == STATE_RECORD) {
    581                         // A null source here means the graph is shutting down
    582                         // unexpectedly, so we need to turn off preview before
    583                         // the surface texture goes away.
    584                         mCameraDevice.stopPreview();
    585                         try {
    586                             mCameraDevice.setPreviewTexture(null);
    587                         } catch(IOException e) {
    588                             throw new RuntimeException("Unable to disconnect " +
    589                                     "camera from effect input", e);
    590                         }
    591                     }
    592                     return;
    593                 }
    594 
    595                 // Lock AE/AWB to reduce transition flicker
    596                 tryEnable3ALocks(true);
    597 
    598                 mCameraDevice.stopPreview();
    599                 if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
    600                 try {
    601                     mCameraDevice.setPreviewTexture(mTextureSource);
    602                 } catch(IOException e) {
    603                     throw new RuntimeException("Unable to connect camera to effect input", e);
    604                 }
    605 
    606                 mCameraDevice.startPreview();
    607 
    608                 // Unlock AE/AWB after preview started
    609                 tryEnable3ALocks(false);
    610 
    611                 mState = STATE_PREVIEW;
    612 
    613                 if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");
    614             }
    615         }
    616     };
    617 
    618     private LearningDoneListener mLearningDoneListener =
    619             new LearningDoneListener() {
    620         public void onLearningDone(BackDropperFilter filter) {
    621             if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
    622             // Called in a processing thread, so have to post message back to UI
    623             // thread
    624             sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
    625             enable3ALocks(true);
    626         }
    627     };
    628 
    629     // A callback to finalize the media after the recording is done.
    630     private OnRecordingDoneListener mRecordingDoneListener =
    631             new OnRecordingDoneListener() {
    632         // Forward the callback to the VideoCamera object (as an asynchronous event).
    633         public void onRecordingDone() {
    634             if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
    635             sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
    636         }
    637     };
    638 
    639     public synchronized void startRecording() {
    640         if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");
    641 
    642         switch (mState) {
    643             case STATE_RECORD:
    644                 throw new RuntimeException("Already recording, cannot begin anew!");
    645             case STATE_RELEASED:
    646                 throw new RuntimeException("startRecording called on an already released recorder!");
    647             default:
    648                 break;
    649         }
    650 
    651         if ((mOutputFile == null) && (mFd == null)) {
    652             throw new RuntimeException("No output file name or descriptor provided!");
    653         }
    654 
    655         if (mState == STATE_CONFIGURE) {
    656             startPreview();
    657         }
    658 
    659         Filter recorder = mRunner.getGraph().getFilter("recorder");
    660         if (mFd != null) {
    661             recorder.setInputValue("outputFileDescriptor", mFd);
    662         } else {
    663             recorder.setInputValue("outputFile", mOutputFile);
    664         }
    665         // It is OK to set the audio source without checking for time lapse here,
    666         // since that check is done in the MediaEncoderFilter itself.
    667         recorder.setInputValue("audioSource", MediaRecorder.AudioSource.CAMCORDER);
    668 
    669         recorder.setInputValue("recordingProfile", mProfile);
    670         recorder.setInputValue("orientationHint", mOrientationHint);
    671         // It is important to set the time lapse interval to 0 if the capture rate
    672         // is not > 0, since the recorder is not re-created every time recording
    673         // starts. The recorder infers whether the capture is time lapsed from the
    674         // value of this interval.
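                // For example, a time lapse capture rate of 0.5 fps corresponds to an
                // interval of 1 / 0.5 s = 2 s = 2,000,000 us between captured frames.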
    675         boolean captureTimeLapse = mCaptureRate > 0;
    676         if (captureTimeLapse) {
    677             double timeBetweenFrameCapture = 1 / mCaptureRate;
    678             recorder.setInputValue("timelapseRecordingIntervalUs",
    679                     (long) (1000000 * timeBetweenFrameCapture));
    680         } else {
    681             recorder.setInputValue("timelapseRecordingIntervalUs", 0L);
    682         }
    683 
    684         if (mInfoListener != null) {
    685             recorder.setInputValue("infoListener", mInfoListener);
    686         }
    687         if (mErrorListener != null) {
    688             recorder.setInputValue("errorListener", mErrorListener);
    689         }
    690         recorder.setInputValue("maxFileSize", mMaxFileSize);
    691         recorder.setInputValue("maxDurationMs", mMaxDurationMs);
    692         recorder.setInputValue("recording", true);
    693         if (mRecordSound != null) mRecordSound.play();
    694         mState = STATE_RECORD;
    695     }
    696 
    697     public synchronized void stopRecording() {
    698         if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
    699 
    700         switch (mState) {
    701             case STATE_CONFIGURE:
    702             case STATE_STARTING_PREVIEW:
    703             case STATE_PREVIEW:
    704                 Log.w(TAG, "stopRecording called when recording is not active!");
    705                 return;
    706             case STATE_RELEASED:
    707                 throw new RuntimeException("stopRecording called on released EffectsRecorder!");
    708             default:
    709                 break;
    710         }
    711         Filter recorder = mRunner.getGraph().getFilter("recorder");
    712         recorder.setInputValue("recording", false);
    713         if (mRecordSound != null) mRecordSound.play();
    714         mState = STATE_PREVIEW;
    715     }
    716 
    717     // Stop and release effect resources
    718     public synchronized void stopPreview() {
    719         if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
    720 
    721         switch (mState) {
    722             case STATE_CONFIGURE:
    723                 Log.w(TAG, "stopPreview called when preview is not active!");
    724                 return;
    725             case STATE_RELEASED:
    726                 throw new RuntimeException("stopPreview called on released EffectsRecorder!");
    727             default:
    728                 break;
    729         }
    730 
    731         if (mState == STATE_RECORD) {
    732             stopRecording();
    733         }
    734 
    735         mCurrentEffect = EFFECT_NONE;
    736 
    737         mCameraDevice.stopPreview();
    738         try {
    739             mCameraDevice.setPreviewTexture(null);
    740         } catch(IOException e) {
    741             throw new RuntimeException("Unable to disconnect camera from effect input", e);
    742         }
    743 
    744         mState = STATE_CONFIGURE;
    745         mOldRunner = mRunner;
    746         mRunner.stop();
    747         mRunner = null;
    748         // Rest of stop and release handled in mRunnerDoneCallback
    749     }
    750 
    751     // Try to enable/disable video stabilization if supported; otherwise return false
    752     boolean tryEnableVideoStabilization(boolean toggle) {
    753         Camera.Parameters params = mCameraDevice.getParameters();
    754 
    755         String vstabSupported = params.get("video-stabilization-supported");
    756         if ("true".equals(vstabSupported)) {
    757             if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
    758             params.set("video-stabilization", toggle ? "true" : "false");
    759             mCameraDevice.setParameters(params);
    760             return true;
    761         }
    762         if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
    763         return false;
    764     }
    765 
    766     // Try to enable/disable 3A locks if supported; otherwise return false
    767     boolean tryEnable3ALocks(boolean toggle) {
    768         Camera.Parameters params = mCameraDevice.getParameters();
    769         if (params.isAutoExposureLockSupported() &&
    770             params.isAutoWhiteBalanceLockSupported() ) {
    771             params.setAutoExposureLock(toggle);
    772             params.setAutoWhiteBalanceLock(toggle);
    773             mCameraDevice.setParameters(params);
    774             return true;
    775         }
    776         return false;
    777     }
    778 
    779     // Try to enable/disable 3A locks if supported; otherwise, throw error
    780     // Use this when locks are essential to success
    781     void enable3ALocks(boolean toggle) {
    783         if (!tryEnable3ALocks(toggle)) {
    784             throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
    785         }
    786     }
    787 
    788     private OnRunnerDoneListener mRunnerDoneCallback =
    789             new OnRunnerDoneListener() {
    790         public void onRunnerDone(int result) {
    791             synchronized(EffectsRecorder.this) {
    792                 if (mLogVerbose) {
    793                     Log.v(TAG,
    794                           "Graph runner done (" + EffectsRecorder.this
    795                           + ", mRunner " + mRunner
    796                           + ", mOldRunner " + mOldRunner + ")");
    797                 }
    798                 if (result == GraphRunner.RESULT_ERROR) {
    799                     // Handle error case
    800                     Log.e(TAG, "Error running filter graph!");
    801                     raiseError(mRunner == null ? null : mRunner.getError());
    802                 }
    803                 if (mOldRunner != null) {
    804                     // Tear down old graph if available
    805                     if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
    806                     GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
    807                     if (glEnv != null && !glEnv.isActive()) {
    808                         glEnv.activate();
    809                     }
    810                     mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
    811                     if (glEnv != null && glEnv.isActive()) {
    812                         glEnv.deactivate();
    813                     }
    814                     mOldRunner = null;
    815                 }
    816                 if (mState == STATE_PREVIEW ||
    817                         mState == STATE_STARTING_PREVIEW) {
    818                     // Switching effects, start up the new runner
    819                     if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
    820                     tryEnable3ALocks(false);
    821                     mRunner.run();
    822                 } else if (mState != STATE_RELEASED) {
    823                     // Shutting down effects
    824                     if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
    825                     tryEnable3ALocks(false);
    826                     sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
    827                 } else {
    828                     // STATE_RELEASED - camera will be/has been released as well, do nothing.
    829                 }
    830             }
    831         }
    832     };
    833 
    834     // Indicates that all camera/recording activity needs to halt
    835     public synchronized void release() {
    836         if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
    837 
    838         switch (mState) {
    839             case STATE_RECORD:
    840             case STATE_STARTING_PREVIEW:
    841             case STATE_PREVIEW:
    842                 stopPreview();
    843                 // Fall-through
    844             default:
    845                 if (mRecordSound != null) mRecordSound.release();
    846                 mState = STATE_RELEASED;
    847                 break;
    848         }
    849     }
    850 
    851     private void sendMessage(final int effect, final int msg) {
    852         if (mEffectsListener != null) {
    853             mHandler.post(new Runnable() {
    854                 public void run() {
    855                     mEffectsListener.onEffectsUpdate(effect, msg);
    856                 }
    857             });
    858         }
    859     }
    860 
    861     private void raiseError(final Exception exception) {
    862         if (mEffectsListener != null) {
    863             mHandler.post(new Runnable() {
    864                 public void run() {
    865                     if (mFd != null) {
    866                         mEffectsListener.onEffectsError(exception, null);
    867                     } else {
    868                         mEffectsListener.onEffectsError(exception, mOutputFile);
    869                     }
    870                 }
    871             });
    872         }
    873     }
    874 
    875 }
    876