/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.videoengine;

import java.io.IOException;
import java.util.concurrent.Exchanger;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.util.Log;
import android.view.OrientationEventListener;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceHolder;

// Wrapper for android Camera, with support for direct local preview rendering.
// Threading notes: this class is called from ViE C++ code, and from Camera &
// SurfaceHolder Java callbacks.  Since these calls happen on different threads,
// the entry points to this class are all synchronized.  This shouldn't present
// a performance bottleneck because only onPreviewFrame() is called more than
// once (and is called serially on a single thread), so the lock should be
// uncontended.  Note that each of these synchronized methods must check
// |camera| for null to account for having possibly waited for stopCapture() to
// complete.
public class VideoCaptureAndroid implements PreviewCallback, Callback {
  private final static String TAG = "WEBRTC-JC";

  private static SurfaceHolder localPreview;
  private Camera camera;  // Only non-null while capturing.
  private CameraThread cameraThread;
  private Handler cameraThreadHandler;
  private final int id;
  private final Camera.CameraInfo info;
  private final OrientationEventListener orientationListener;
  private boolean orientationListenerEnabled;
  private final long native_capturer;  // |VideoCaptureAndroid*| in C++.
  private SurfaceTexture cameraSurfaceTexture;
  private int[] cameraGlTextures = null;
  // Arbitrary queue depth.  Higher number means more memory allocated & held,
  // lower number means more sensitivity to processing time in the client (and
  // potentially stalling the capturer if it runs out of buffers to write to).
  private final int numCaptureBuffers = 3;
  private double averageDurationMs;
  private long lastCaptureTimeMs;
  private int frameCount;

  // Requests future capturers to send their frames to |localPreview| directly.
  public static void setLocalPreview(SurfaceHolder localPreview) {
    // It is a gross hack that this is a class-static.  Doing it right would
    // mean plumbing this through the C++ API and using it from
    // webrtc/examples/android/media_demo's MediaEngine class.
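    // A caller would typically hand over the SurfaceHolder of whatever
    // SurfaceView should render the local preview, e.g. (illustrative only;
    // |previewView| is not part of this class):
    //   VideoCaptureAndroid.setLocalPreview(previewView.getHolder());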
    VideoCaptureAndroid.localPreview = localPreview;
  }

  public VideoCaptureAndroid(int id, long native_capturer) {
    this.id = id;
    this.native_capturer = native_capturer;
    this.info = new Camera.CameraInfo();
    Camera.getCameraInfo(id, info);

    // Must be the last thing in the ctor since we pass a reference to |this|!
    final VideoCaptureAndroid self = this;
    orientationListener = new OrientationEventListener(GetContext()) {
        @Override public void onOrientationChanged(int degrees) {
          if (!self.orientationListenerEnabled) {
            return;
          }
          if (degrees == OrientationEventListener.ORIENTATION_UNKNOWN) {
            return;
          }
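          // Fold the camera sensor's mounting orientation (info.orientation)
          // into the reported device rotation so the native capturer can
          // rotate frames upright.  Front-facing cameras are mirrored, so
          // their rotation is applied in the opposite direction.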
          if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            degrees = (info.orientation - degrees + 360) % 360;
          } else {  // back-facing
            degrees = (info.orientation + degrees) % 360;
          }
          self.OnOrientationChanged(self.native_capturer, degrees);
        }
      };
    // Don't add any code here; see the comment above |self|!
  }

  // Return the global application context.
  private static native Context GetContext();
  // Request frame rotation post-capture.
  private native void OnOrientationChanged(long captureObject, int degrees);

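  // Dedicated thread that owns the Camera.  run() prepares a Looper so that
  // Camera callbacks and Runnables posted to |cameraThreadHandler| execute on
  // this thread; the Handler is handed back to startCapture() through the
  // Exchanger.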
  private class CameraThread extends Thread {
    private Exchanger<Handler> handlerExchanger;
    public CameraThread(Exchanger<Handler> handlerExchanger) {
      this.handlerExchanger = handlerExchanger;
    }

    @Override public void run() {
      Looper.prepare();
      exchange(handlerExchanger, new Handler());
      Looper.loop();
    }
  }

  // Called by native code.  Returns true if the capturer was started.
  //
  // Note that this actually opens the camera, and Camera callbacks run on the
  // thread that calls open(), so this is done on the CameraThread.  Since the
  // ViE API needs a synchronous success return value, we wait for the result.
  private synchronized boolean startCapture(
      final int width, final int height,
      final int min_mfps, final int max_mfps) {
    Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
        min_mfps + ":" + max_mfps);
    if (cameraThread != null || cameraThreadHandler != null) {
      throw new RuntimeException("Camera thread already started!");
    }
    Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
    cameraThread = new CameraThread(handlerExchanger);
    cameraThread.start();
    cameraThreadHandler = exchange(handlerExchanger, null);

    final Exchanger<Boolean> result = new Exchanger<Boolean>();
    cameraThreadHandler.post(new Runnable() {
        @Override public void run() {
          startCaptureOnCameraThread(width, height, min_mfps, max_mfps, result);
        }
      });
    boolean startResult = exchange(result, false); // |false| is a dummy value.
    orientationListenerEnabled = true;
    orientationListener.enable();
    return startResult;
  }

  private void startCaptureOnCameraThread(
      int width, int height, int min_mfps, int max_mfps,
      Exchanger<Boolean> result) {
    Throwable error = null;
    try {
      camera = Camera.open(id);

      if (localPreview != null) {
        localPreview.addCallback(this);
        if (localPreview.getSurface() != null &&
            localPreview.getSurface().isValid()) {
          camera.setPreviewDisplay(localPreview);
        }
      } else {
        // No local renderer (we only care about onPreviewFrame() buffers, not a
        // directly-displayed UI element).  Camera won't capture without
        // setPreview{Texture,Display}, so we create a SurfaceTexture and hand
        // it over to Camera, but never listen for frame-ready callbacks,
        // and never call updateTexImage on it.
        try {
          cameraGlTextures = new int[1];
          // Generate one texture pointer and bind it as an external texture.
          GLES20.glGenTextures(1, cameraGlTextures, 0);
          GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              cameraGlTextures[0]);
          GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
          GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
          GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
          GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
              GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

          cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]);
          cameraSurfaceTexture.setOnFrameAvailableListener(null);
          camera.setPreviewTexture(cameraSurfaceTexture);
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }

      Camera.Parameters parameters = camera.getParameters();
      Log.d(TAG, "isVideoStabilizationSupported: " +
          parameters.isVideoStabilizationSupported());
      if (parameters.isVideoStabilizationSupported()) {
        parameters.setVideoStabilization(true);
      }
      parameters.setPreviewSize(width, height);
      parameters.setPreviewFpsRange(min_mfps, max_mfps);
      int format = ImageFormat.NV21;
      parameters.setPreviewFormat(format);
      camera.setParameters(parameters);
      int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
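      // NV21 is 12 bits per pixel, so e.g. a 640x480 frame needs
      // 640 * 480 * 12 / 8 = 460800 bytes per callback buffer.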
      for (int i = 0; i < numCaptureBuffers; i++) {
        camera.addCallbackBuffer(new byte[bufSize]);
      }
      camera.setPreviewCallbackWithBuffer(this);
      frameCount = 0;
      // Seed the moving average with the expected frame duration in ms;
      // |max_mfps| is in frames-per-second * 1000.
      averageDurationMs = 1000000.0 / max_mfps;
      camera.startPreview();
      exchange(result, true);
      return;
    } catch (IOException e) {
      error = e;
    } catch (RuntimeException e) {
      error = e;
    }
    Log.e(TAG, "startCapture failed", error);
    if (camera != null) {
      Exchanger<Boolean> resultDropper = new Exchanger<Boolean>();
      stopCaptureOnCameraThread(resultDropper);
      exchange(resultDropper, false);
    }
    exchange(result, false);
    return;
  }

  // Called by native code.  Returns true when camera is known to be stopped.
  private synchronized boolean stopCapture() {
    Log.d(TAG, "stopCapture");
    orientationListener.disable();
    orientationListenerEnabled = false;
    final Exchanger<Boolean> result = new Exchanger<Boolean>();
    cameraThreadHandler.post(new Runnable() {
        @Override public void run() {
          stopCaptureOnCameraThread(result);
        }
      });
    boolean status = exchange(result, false);  // |false| is a dummy value here.
    try {
      cameraThread.join();
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
    cameraThreadHandler = null;
    cameraThread = null;
    Log.d(TAG, "stopCapture done");
    return status;
  }

  private void stopCaptureOnCameraThread(
      Exchanger<Boolean> result) {
    if (camera == null) {
      throw new RuntimeException("Camera is already stopped!");
    }
    Throwable error = null;
    try {
      camera.stopPreview();
      camera.setPreviewCallbackWithBuffer(null);
      if (localPreview != null) {
        localPreview.removeCallback(this);
        camera.setPreviewDisplay(null);
      } else {
        camera.setPreviewTexture(null);
        cameraSurfaceTexture = null;
        if (cameraGlTextures != null) {
          GLES20.glDeleteTextures(1, cameraGlTextures, 0);
          cameraGlTextures = null;
        }
      }
      camera.release();
      camera = null;
      exchange(result, true);
      Looper.myLooper().quit();
      return;
    } catch (IOException e) {
      error = e;
    } catch (RuntimeException e) {
      error = e;
    }
    Log.e(TAG, "Failed to stop camera", error);
    exchange(result, false);
    Looper.myLooper().quit();
    return;
  }

  private native void ProvideCameraFrame(
      byte[] data, int length, long timeStamp, long captureObject);

  // Called on cameraThread, so it must not be synchronized.
  @Override
  public void onPreviewFrame(byte[] data, Camera callbackCamera) {
    if (Thread.currentThread() != cameraThread) {
      throw new RuntimeException("Camera callback not on camera thread?!?");
    }
    if (camera == null) {
      return;
    }
    if (camera != callbackCamera) {
      throw new RuntimeException("Unexpected camera in callback!");
    }
    frameCount++;
    long captureTimeMs = SystemClock.elapsedRealtime();
    if (frameCount > 1) {
      double durationMs = captureTimeMs - lastCaptureTimeMs;
      averageDurationMs = 0.9 * averageDurationMs + 0.1 * durationMs;
      if ((frameCount % 30) == 0) {
        Log.d(TAG, "Camera TS " + captureTimeMs +
            ". Duration: " + (int)durationMs + " ms. FPS: " +
            (int) (1000 / averageDurationMs + 0.5));
      }
    }
    lastCaptureTimeMs = captureTimeMs;
    ProvideCameraFrame(data, data.length, captureTimeMs, native_capturer);
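    // Hand the buffer back to the camera; with setPreviewCallbackWithBuffer()
    // frames are only delivered into buffers previously added, so dropping
    // this call would eventually starve the capturer of buffers.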
    camera.addCallbackBuffer(data);
  }

  // Sets the rotation of the preview render window.
  // Does not affect the captured video image.
  // Called by native code.
  private synchronized void setPreviewRotation(final int rotation) {
    if (camera == null || cameraThreadHandler == null) {
      return;
    }
    final Exchanger<IOException> result = new Exchanger<IOException>();
    cameraThreadHandler.post(new Runnable() {
        @Override public void run() {
          setPreviewRotationOnCameraThread(rotation, result);
        }
      });
    // Use the exchanger below to block this function until
    // setPreviewRotationOnCameraThread() completes, holding the synchronized
    // lock for the duration.  The exchanged value itself is ignored.
    exchange(result, null);
  }

  private void setPreviewRotationOnCameraThread(
      int rotation, Exchanger<IOException> result) {
    Log.v(TAG, "setPreviewRotation:" + rotation);

    int resultRotation = 0;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
      // This is a front-facing camera.  setDisplayOrientation() will flip
      // the image horizontally before doing the rotation.
      resultRotation = (360 - rotation) % 360;  // Compensate for the mirror.
    } else {
      // Back-facing camera.
      resultRotation = rotation;
    }
    camera.setDisplayOrientation(resultRotation);
    exchange(result, null);
  }

  @Override
  public synchronized void surfaceChanged(
      SurfaceHolder holder, int format, int width, int height) {
    Log.d(TAG, "VideoCaptureAndroid::surfaceChanged ignored: " +
        format + ": " + width + "x" + height);
  }

  @Override
  public synchronized void surfaceCreated(final SurfaceHolder holder) {
    Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
    if (camera == null || cameraThreadHandler == null) {
      return;
    }
    final Exchanger<IOException> result = new Exchanger<IOException>();
    cameraThreadHandler.post(new Runnable() {
        @Override public void run() {
          setPreviewDisplayOnCameraThread(holder, result);
        }
      });
    IOException e = exchange(result, null);  // |null| is a dummy value here.
    if (e != null) {
      throw new RuntimeException(e);
    }
  }

  @Override
  public synchronized void surfaceDestroyed(SurfaceHolder holder) {
    Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
    if (camera == null || cameraThreadHandler == null) {
      return;
    }
    final Exchanger<IOException> result = new Exchanger<IOException>();
    cameraThreadHandler.post(new Runnable() {
        @Override public void run() {
          setPreviewDisplayOnCameraThread(null, result);
        }
      });
    IOException e = exchange(result, null);  // |null| is a dummy value here.
    if (e != null) {
      throw new RuntimeException(e);
    }
  }

  private void setPreviewDisplayOnCameraThread(
      SurfaceHolder holder, Exchanger<IOException> result) {
    try {
      camera.setPreviewDisplay(holder);
    } catch (IOException e) {
      exchange(result, e);
      return;
    }
    exchange(result, null);
    return;
  }

  // Exchanges |value| with |exchanger|, converting InterruptedExceptions to
  // RuntimeExceptions (since we expect never to see these).
  private static <T> T exchange(Exchanger<T> exchanger, T value) {
    try {
      return exchanger.exchange(value);
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
  }
}