/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.webrtc;

import android.graphics.Point;
import android.opengl.GLES20;
import android.opengl.Matrix;

import java.nio.ByteBuffer;

/**
 * Static helper functions for renderer implementations.
 */
public class RendererCommon {
  /** Interface for reporting rendering events. */
  public static interface RendererEvents {
    /**
     * Callback fired once the first frame is rendered.
     */
    public void onFirstFrameRendered();

    /**
     * Callback fired when the rendered frame's resolution or rotation has changed.
     */
    public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
  }
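
  // Illustrative sketch (not part of the original file): a minimal RendererEvents implementation.
  // The reactions shown in the method bodies are assumptions for the example.
  //
  //   RendererCommon.RendererEvents events = new RendererCommon.RendererEvents() {
  //     @Override
  //     public void onFirstFrameRendered() {
  //       // E.g. hide a loading spinner; note this is called from the render thread.
  //     }
  //     @Override
  //     public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
  //       // E.g. post to the UI thread and request a layout pass with the new size.
  //     }
  //   };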

  /** Interface for rendering frames on an EGLSurface. */
  public static interface GlDrawer {
    /**
     * Functions for drawing frames with different sources. The rendering surface target is
     * implied by the current EGL context of the calling thread and requires no explicit argument.
     * The coordinates specify the viewport location on the surface target.
     */
    void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height);
    void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height);
    void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height);

    /**
     * Release all GL resources. This needs to be done manually, otherwise resources may leak.
     */
    void release();
  }
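
  // Illustrative sketch (not part of the original file): driving a GlDrawer once per frame.
  // GlRectDrawer is assumed to be an available GlDrawer implementation in this package;
  // |oesTextureId|, |texMatrix|, |surfaceWidth| and |surfaceHeight| are hypothetical names.
  //
  //   GlDrawer drawer = new GlRectDrawer();
  //   // The EGL context targeting the output surface must be current on this thread.
  //   drawer.drawOes(oesTextureId, texMatrix, 0, 0, surfaceWidth, surfaceHeight);
  //   // ...repeat per frame, then free GL resources when rendering stops:
  //   drawer.release();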

  /**
   * Helper class for uploading YUV ByteBuffer frames to textures, handling stride > width. This
   * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
   */
  public static class YuvUploader {
    // Intermediate copy buffer for uploading YUV frames that are not packed, i.e. stride > width.
    // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
    // that handles stride and compare performance with intermediate copy.
    private ByteBuffer copyBuffer;

    /**
     * Upload |planes| into |outputYuvTextures|, taking stride into consideration.
     * |outputYuvTextures| must have been generated in advance.
     */
    public void uploadYuvData(
        int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
      final int[] planeWidths = new int[] {width, width / 2, width / 2};
      final int[] planeHeights = new int[] {height, height / 2, height / 2};
      // Make a first pass to see if we need a temporary copy buffer.
      int copyCapacityNeeded = 0;
      for (int i = 0; i < 3; ++i) {
        if (strides[i] > planeWidths[i]) {
          copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
        }
      }
      // Allocate copy buffer if necessary.
      if (copyCapacityNeeded > 0
          && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
        copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
      }
      // Upload each plane.
      for (int i = 0; i < 3; ++i) {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
        // GLES only accepts packed data, i.e. stride == planeWidth.
        final ByteBuffer packedByteBuffer;
        if (strides[i] == planeWidths[i]) {
          // Input is packed already.
          packedByteBuffer = planes[i];
        } else {
          VideoRenderer.nativeCopyPlane(
              planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
          packedByteBuffer = copyBuffer;
        }
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
            planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
      }
    }
  }
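
  // Illustrative sketch (not part of the original file): uploading one I420 frame. Texture ids
  // are generated with plain GLES20 calls; |frame| is assumed to be an incoming
  // VideoRenderer.I420Frame carrying the plane buffers and strides.
  //
  //   int[] yuvTextures = new int[3];
  //   GLES20.glGenTextures(3, yuvTextures, 0);
  //   // ...bind each texture once and set min/mag filters and wrap modes...
  //   YuvUploader uploader = new YuvUploader();
  //   uploader.uploadYuvData(yuvTextures, frame.width, frame.height, frame.yuvStrides,
  //       frame.yuvPlanes);
  //   // The textures can now be drawn with GlDrawer.drawYuv().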

  // Types of video scaling:
  // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
  //    maintaining the aspect ratio (black borders may be displayed).
  // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
  //    maintaining the aspect ratio. Some portion of the video frame may be
  //    clipped.
  // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
  //    possible of the view while maintaining aspect ratio, under the constraint that at least
  //    |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
  public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }

  // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
  // This limits excessive cropping when adjusting display size.
  private static final float BALANCED_VISIBLE_FRACTION = 0.5625f;

  public static final float[] identityMatrix() {
    return new float[] {
        1, 0, 0, 0,
        0, 1, 0, 0,
        0, 0, 1, 0,
        0, 0, 0, 1};
  }

  // Matrix with transform y' = 1 - y.
  public static final float[] verticalFlipMatrix() {
    return new float[] {
        1,  0, 0, 0,
        0, -1, 0, 0,
        0,  0, 1, 0,
        0,  1, 0, 1};
  }

  // Matrix with transform x' = 1 - x.
  public static final float[] horizontalFlipMatrix() {
    return new float[] {
        -1, 0, 0, 0,
         0, 1, 0, 0,
         0, 0, 1, 0,
         1, 0, 0, 1};
  }
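
  // Illustrative check (not part of the original file): these matrices are column-major, so they
  // can be applied to a homogeneous texture coordinate (u, v, 0, 1) with android.opengl.Matrix.
  //
  //   float[] uv = new float[] {0.25f, 0.75f, 0f, 1f};
  //   float[] flipped = new float[4];
  //   Matrix.multiplyMV(flipped, 0, verticalFlipMatrix(), 0, uv, 0);
  //   // flipped == {0.25f, 0.25f, 0f, 1f}, i.e. v' = 1 - v.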

  /**
   * Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
   * clockwise when rendered.
   */
  public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
    final float[] rotationMatrix = new float[16];
    Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
    adjustOrigin(rotationMatrix);
    return multiplyMatrices(textureMatrix, rotationMatrix);
  }
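
  // Illustrative sketch (not part of the original file): compensating for frame rotation when
  // drawing an OES texture. |surfaceTexture|, |frame.rotationDegree|, |drawer| and the viewport
  // names are assumptions; the base transform comes from SurfaceTexture.getTransformMatrix().
  //
  //   float[] texMatrix = new float[16];
  //   surfaceTexture.getTransformMatrix(texMatrix);
  //   float[] rotatedMatrix = rotateTextureMatrix(texMatrix, frame.rotationDegree);
  //   drawer.drawOes(oesTextureId, rotatedMatrix, 0, 0, surfaceWidth, surfaceHeight);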

  /**
   * Returns new matrix with the result of a * b.
   */
  public static float[] multiplyMatrices(float[] a, float[] b) {
    final float[] resultMatrix = new float[16];
    Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
    return resultMatrix;
  }

  /**
   * Returns layout transformation matrix that applies an optional mirror effect and compensates
   * for video vs display aspect ratio.
   */
  public static float[] getLayoutMatrix(
      boolean mirror, float videoAspectRatio, float displayAspectRatio) {
    float scaleX = 1;
    float scaleY = 1;
    // Scale X or Y dimension so that video and display size have same aspect ratio.
    if (displayAspectRatio > videoAspectRatio) {
      scaleY = videoAspectRatio / displayAspectRatio;
    } else {
      scaleX = displayAspectRatio / videoAspectRatio;
    }
    // Apply optional horizontal flip.
    if (mirror) {
      scaleX *= -1;
    }
    final float[] matrix = new float[16];
    Matrix.setIdentityM(matrix, 0);
    Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
    adjustOrigin(matrix);
    return matrix;
  }
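
  // Illustrative sketch (not part of the original file): a mirrored 4:3 frame shown in a 16:9
  // view. The display is wider than the video, so scaleY = (4/3) / (16/9) = 0.75 and the top and
  // bottom of the frame are cropped; |samplingMatrix| is a hypothetical name for the frame's own
  // texture transform.
  //
  //   float[] layoutMatrix = getLayoutMatrix(true /* mirror */, 4.0f / 3.0f, 16.0f / 9.0f);
  //   float[] texMatrix = multiplyMatrices(samplingMatrix, layoutMatrix);
  //   // texMatrix can now be passed to any of the GlDrawer draw functions.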

  /**
   * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
   */
  public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
      int maxDisplayWidth, int maxDisplayHeight) {
    return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
        maxDisplayWidth, maxDisplayHeight);
  }
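
  // Illustrative sketch (not part of the original file): sizing a view for a 1280x720 frame
  // (aspect ratio 16/9) inside a 1080x1080 area. The concrete numbers follow from the private
  // getDisplaySize() below.
  //
  //   // SCALE_ASPECT_FIT: 1080x608 (rounded), i.e. letterboxed to show the whole frame.
  //   // SCALE_ASPECT_FILL: 1080x1080, filling the area and cropping the sides.
  //   // SCALE_ASPECT_BALANCED: 1080x1080, since at least 56.25% of the frame stays visible.
  //   Point size = getDisplaySize(ScalingType.SCALE_ASPECT_FIT, 16.0f / 9.0f, 1080, 1080);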

  /**
   * Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
   * that are in the range 0 to 1.
   */
  private static void adjustOrigin(float[] matrix) {
    // Note that OpenGL is using column-major order.
    // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
    matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
    matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
    // Post translate with 0.5 to move coordinates to range [0, 1].
    matrix[12] += 0.5f;
    matrix[13] += 0.5f;
  }
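
  // Illustrative check (not part of the original file): for a 90-degree rotation matrix from
  // Matrix.setRotateM(), adjustOrigin() makes the rotation happen around the texture center
  // (0.5, 0.5) rather than the corner, so the unit square maps onto itself:
  // (0, 0) -> (1, 0), (1, 0) -> (1, 1), (1, 1) -> (0, 1), (0, 1) -> (0, 0).
  // Without the adjustment, e.g. (1, 1) would map to (-1, 1), outside the [0, 1] texture range.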

  /**
   * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
   * that must remain visible.
   */
  private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
    switch (scalingType) {
      case SCALE_ASPECT_FIT:
        return 1.0f;
      case SCALE_ASPECT_FILL:
        return 0.0f;
      case SCALE_ASPECT_BALANCED:
        return BALANCED_VISIBLE_FRACTION;
      default:
        throw new IllegalArgumentException();
    }
  }

  /**
   * Calculate display size based on minimum fraction of the video that must remain visible,
   * video aspect ratio, and maximum display size.
   */
  private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
      int maxDisplayWidth, int maxDisplayHeight) {
    // If there is no constraint on the amount of cropping, fill the allowed display area.
    if (minVisibleFraction == 0 || videoAspectRatio == 0) {
      return new Point(maxDisplayWidth, maxDisplayHeight);
    }
    // Each dimension is constrained on max display size and how much we are allowed to crop.
    final int width = Math.min(maxDisplayWidth,
        Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
    final int height = Math.min(maxDisplayHeight,
        Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
    return new Point(width, height);
  }
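
  // Worked example (not part of the original file), continuing the 16:9 frame in a 1080x1080 area
  // with SCALE_ASPECT_BALANCED (minVisibleFraction = 0.5625):
  //   width  = min(1080, round(1080 / 0.5625 * 16 / 9)) = min(1080, 3413) = 1080
  //   height = min(1080, round(1080 / 0.5625 / (16 / 9))) = min(1080, 1080) = 1080
  // i.e. the full 1080x1080 area is used, and exactly 9/16 = 56.25% of the frame width remains
  // visible, which is the BALANCED_VISIBLE_FRACTION limit.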
}