/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

#include <stdio.h>
#include <stdlib.h>
#include <string.h>  // memcpy()

#include "webrtc/modules/video_render/android/video_render_opengles20.h"

//#define ANDROID_LOG

#ifdef ANDROID_LOG
#include <android/log.h>
#include <stdio.h>

#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/interface/trace.h"
#endif

namespace webrtc {

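// Index buffer: two triangles, (0, 3, 2) and (0, 2, 1), covering the quad
// described by the four vertices in _vertices.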
const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };

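// The vertex shader: passes the position and texture coordinates through
// to the fragment shader unchanged.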
const char VideoRenderOpenGles20::g_vertextShader[] = {
  "attribute vec4 aPosition;\n"
  "attribute vec2 aTextureCoord;\n"
  "varying vec2 vTextureCoord;\n"
  "void main() {\n"
  "  gl_Position = aPosition;\n"
  "  vTextureCoord = aTextureCoord;\n"
  "}\n" };

// The fragment shader.
// Samples the Y, U and V planes and converts BT.601 video-range YUV to RGB.
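// The constants below are the usual BT.601 limited-range coefficients:
//   y' = 1.1643 * (y - 0.0625)                          // 0.0625 = 16/256
//   r  = y' + 1.5958 * (v - 0.5)
//   g  = y' - 0.39173 * (u - 0.5) - 0.81290 * (v - 0.5)
//   b  = y' + 2.017  * (u - 0.5)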
const char VideoRenderOpenGles20::g_fragmentShader[] = {
  "precision mediump float;\n"
  "uniform sampler2D Ytex;\n"
  "uniform sampler2D Utex,Vtex;\n"
  "varying vec2 vTextureCoord;\n"
  "void main(void) {\n"
  "  float nx,ny,r,g,b,y,u,v;\n"
  "  mediump vec4 txl,ux,vx;"
  "  nx=vTextureCoord[0];\n"
  "  ny=vTextureCoord[1];\n"
  "  y=texture2D(Ytex,vec2(nx,ny)).r;\n"
  "  u=texture2D(Utex,vec2(nx,ny)).r;\n"
  "  v=texture2D(Vtex,vec2(nx,ny)).r;\n"

  //"  y = v;\n"+
  "  y=1.1643*(y-0.0625);\n"
  "  u=u-0.5;\n"
  "  v=v-0.5;\n"

  "  r=y+1.5958*v;\n"
  "  g=y-0.39173*u-0.81290*v;\n"
  "  b=y+2.017*u;\n"
  "  gl_FragColor=vec4(r,g,b,1.0);\n"
  "}\n" };

VideoRenderOpenGles20::VideoRenderOpenGles20(int32_t id) :
    _id(id),
    _textureWidth(-1),
    _textureHeight(-1) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
               __FUNCTION__, (int) _id);

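  // Default quad covering the whole viewport: 4 vertices with 5 interleaved
  // floats each (x, y, z, u, v). Setup() walks this array with a stride of
  // 5 * sizeof(GLfloat), and SetCoordinates() later overwrites the positions.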
  const GLfloat vertices[20] = {
    // X, Y, Z, U, V
    -1, -1, 0, 0, 1, // Bottom Left
    1, -1, 0, 1, 1, //Bottom Right
    1, 1, 0, 1, 0, //Top Right
    -1, 1, 0, 0, 0 }; //Top Left

  memcpy(_vertices, vertices, sizeof(_vertices));
}

VideoRenderOpenGles20::~VideoRenderOpenGles20() {
}

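// Compiles and links the shader program, binds the interleaved vertex and
// texture-coordinate arrays, points the Ytex/Utex/Vtex samplers at texture
// units 0-2 and sets the viewport to the given surface size.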
int32_t VideoRenderOpenGles20::Setup(int32_t width, int32_t height) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
               "%s: width %d, height %d", __FUNCTION__, (int) width,
               (int) height);

  printGLString("Version", GL_VERSION);
  printGLString("Vendor", GL_VENDOR);
  printGLString("Renderer", GL_RENDERER);
  printGLString("Extensions", GL_EXTENSIONS);

  int maxTextureImageUnits[2];
  int maxTextureSize[2];
  glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
  glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);

  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
               "%s: number of textures %d, size %d", __FUNCTION__,
               (int) maxTextureImageUnits[0], (int) maxTextureSize[0]);

  _program = createProgram(g_vertextShader, g_fragmentShader);
  if (!_program) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Could not create program", __FUNCTION__);
    return -1;
  }

  int positionHandle = glGetAttribLocation(_program, "aPosition");
  checkGlError("glGetAttribLocation aPosition");
  if (positionHandle == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Could not get aPosition handle", __FUNCTION__);
    return -1;
  }

  int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
  checkGlError("glGetAttribLocation aTextureCoord");
  if (textureHandle == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Could not get aTextureCoord handle", __FUNCTION__);
    return -1;
  }

  // Set the vertex position attribute. _vertices holds 4 interleaved
  // vertices with 5 floats each: 3 for the position (x, y, z) and 2 for
  // the texture coordinates (u, v).
  glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
                        5 * sizeof(GLfloat), _vertices);
  checkGlError("glVertexAttribPointer aPosition");

  glEnableVertexAttribArray(positionHandle);
  checkGlError("glEnableVertexAttribArray positionHandle");

  // Set the texture coordinate attribute, starting at offset 3 within the
  // same interleaved vertex data.
  glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
                        * sizeof(GLfloat), &_vertices[3]);
  checkGlError("glVertexAttribPointer aTextureCoord");
  glEnableVertexAttribArray(textureHandle);
  checkGlError("glEnableVertexAttribArray textureHandle");

  glUseProgram(_program);
  int i = glGetUniformLocation(_program, "Ytex");
  checkGlError("glGetUniformLocation Ytex");
  glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
  checkGlError("glUniform1i Ytex");

  i = glGetUniformLocation(_program, "Utex");
  checkGlError("glGetUniformLocation Utex");
  glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
  checkGlError("glUniform1i Utex");

  i = glGetUniformLocation(_program, "Vtex");
  checkGlError("glGetUniformLocation Vtex");
  glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
  checkGlError("glUniform1i Vtex");

  glViewport(0, 0, width, height);
  checkGlError("glViewport");
  return 0;
}

// SetCoordinates
// Sets the coordinates where the stream shall be rendered.
// Values must be between 0 and 1.
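// E.g. SetCoordinates(0, 0.0f, 0.0f, 1.0f, 1.0f) renders the stream across
// the full viewport.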
int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder,
                                              const float left,
                                              const float top,
                                              const float right,
                                              const float bottom) {
  if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
      (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Wrong coordinates", __FUNCTION__);
    return -1;
  }

  //  X, Y, Z, U, V
  // -1, -1, 0, 0, 1, // Bottom Left
  //  1, -1, 0, 1, 1, //Bottom Right
  //  1,  1, 0, 1, 0, //Top Right
  // -1,  1, 0, 0, 0  //Top Left

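  // Map the [0, 1] window coordinates (origin at the top-left, y growing
  // downwards) to [-1, 1] GL clip-space coordinates (y growing upwards),
  // updating only the position part of each interleaved vertex.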
  // Bottom Left
  _vertices[0] = (left * 2) - 1;
  _vertices[1] = -1 * (2 * bottom) + 1;
  _vertices[2] = zOrder;

  //Bottom Right
  _vertices[5] = (right * 2) - 1;
  _vertices[6] = -1 * (2 * bottom) + 1;
  _vertices[7] = zOrder;

  //Top Right
  _vertices[10] = (right * 2) - 1;
  _vertices[11] = -1 * (2 * top) + 1;
  _vertices[12] = zOrder;

  //Top Left
  _vertices[15] = (left * 2) - 1;
  _vertices[16] = -1 * (2 * top) + 1;
  _vertices[17] = zOrder;

  return 0;
}

int32_t VideoRenderOpenGles20::Render(const I420VideoFrame& frameToRender) {

  if (frameToRender.IsZeroSize()) {
    return -1;
  }

  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
               __FUNCTION__, (int) _id);

  glUseProgram(_program);
  checkGlError("glUseProgram");

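  // (Re)allocate the textures whenever the incoming frame size changes;
  // UpdateTextures() then only uploads pixels into the existing textures.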
  if (_textureWidth != (GLsizei) frameToRender.width() ||
      _textureHeight != (GLsizei) frameToRender.height()) {
    SetupTextures(frameToRender);
  }
  UpdateTextures(frameToRender);

  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
  checkGlError("glDrawElements");

  return 0;
}

GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
                                         const char* pSource) {
  GLuint shader = glCreateShader(shaderType);
  if (shader) {
    glShaderSource(shader, 1, &pSource, NULL);
    glCompileShader(shader);
    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (!compiled) {
      GLint infoLen = 0;
      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
      if (infoLen) {
        char* buf = (char*) malloc(infoLen);
        if (buf) {
          glGetShaderInfoLog(shader, infoLen, NULL, buf);
          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                       "%s: Could not compile shader %d: %s",
                       __FUNCTION__, shaderType, buf);
          free(buf);
        }
      }
      // Delete the shader on any compile failure, even when no info log is
      // available, so a failed compile never returns a usable-looking handle.
      glDeleteShader(shader);
      shader = 0;
    }
  }
  return shader;
}

GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
                                            const char* pFragmentSource) {
  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
  if (!vertexShader) {
    return 0;
  }

  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
  if (!pixelShader) {
    return 0;
  }

  GLuint program = glCreateProgram();
  if (program) {
    glAttachShader(program, vertexShader);
    checkGlError("glAttachShader");
    glAttachShader(program, pixelShader);
    checkGlError("glAttachShader");
    glLinkProgram(program);
    GLint linkStatus = GL_FALSE;
    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
    if (linkStatus != GL_TRUE) {
      GLint bufLength = 0;
      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
      if (bufLength) {
        char* buf = (char*) malloc(bufLength);
        if (buf) {
          glGetProgramInfoLog(program, bufLength, NULL, buf);
          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                       "%s: Could not link program: %s",
                       __FUNCTION__, buf);
          free(buf);
        }
      }
      glDeleteProgram(program);
      program = 0;
    }
  }
  return program;
}

void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) {
  const char *v = (const char *) glGetString(s);
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
               name, v);
}

void VideoRenderOpenGles20::checkGlError(const char* op) {
#ifdef ANDROID_LOG
  for (GLint error = glGetError(); error; error = glGetError()) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "after %s() glError (0x%x)\n", op, error);
  }
#else
  return;
#endif
}

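// Allocates one single-channel (GL_LUMINANCE) texture of the given size on
// the given texture unit, clamped at the edges and without mipmaps. Used
// for the Y, U and V planes.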
static void InitializeTexture(int name, int id, int width, int height) {
  glActiveTexture(name);
  glBindTexture(GL_TEXTURE_2D, id);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
               GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
}

void VideoRenderOpenGles20::SetupTextures(const I420VideoFrame& frameToRender) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
               "%s: width %d, height %d", __FUNCTION__,
               frameToRender.width(), frameToRender.height());

  const GLsizei width = frameToRender.width();
  const GLsizei height = frameToRender.height();

  glGenTextures(3, _textureIds); //Generate the Y, U and V texture
  InitializeTexture(GL_TEXTURE0, _textureIds[0], width, height);
  InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2);
  InitializeTexture(GL_TEXTURE2, _textureIds[2], width / 2, height / 2);

  checkGlError("SetupTextures");

  _textureWidth = width;
  _textureHeight = height;
}

// Uploads a plane of pixel data, accounting for stride != width*bpp.
static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride,
                            const uint8_t* plane) {
  if (stride == width) {
    // Yay!  We can upload the entire plane in a single GL call.
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
                    GL_UNSIGNED_BYTE,
                    static_cast<const GLvoid*>(plane));
  } else {
    // Boo!  Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and Android doesn't
    // have GL_EXT_unpack_subimage we have to upload a row at a time.  Ick.
    for (int row = 0; row < height; ++row) {
      glTexSubImage2D(GL_TEXTURE_2D, 0, 0, row, width, 1, GL_LUMINANCE,
                      GL_UNSIGNED_BYTE,
                      static_cast<const GLvoid*>(plane + (row * stride)));
    }
  }
}

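// Uploads the frame's I420 planes into the three textures: Y at full
// resolution, U and V at half resolution in each dimension (4:2:0).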
void VideoRenderOpenGles20::UpdateTextures(
    const I420VideoFrame& frameToRender) {
  const GLsizei width = frameToRender.width();
  const GLsizei height = frameToRender.height();

  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, _textureIds[0]);
  GlTexSubImage2D(width, height, frameToRender.stride(kYPlane),
                  frameToRender.buffer(kYPlane));

  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, _textureIds[1]);
  GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane),
                  frameToRender.buffer(kUPlane));

  glActiveTexture(GL_TEXTURE2);
  glBindTexture(GL_TEXTURE_2D, _textureIds[2]);
  GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kVPlane),
                  frameToRender.buffer(kVPlane));

  checkGlError("UpdateTextures");
}

}  // namespace webrtc