/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "config.h"
#include "MediaTexture.h"
#include "TilesManager.h"
#include "GLUtils.h"
#include "MediaListener.h"

#if USE(ACCELERATED_COMPOSITING)

#include <android/native_window.h>
#include <gui/SurfaceTexture.h>
#include <gui/SurfaceTextureClient.h>
#include <wtf/CurrentTime.h>
#include <JNIUtility.h>
#include "WebCoreJni.h"

#define LAYER_DEBUG
#undef LAYER_DEBUG

#ifdef DEBUG

#include <cutils/log.h>
#include <wtf/text/CString.h>

#undef XLOG
#define XLOG(...) android_printLog(ANDROID_LOG_DEBUG, "MediaTexture", __VA_ARGS__)

#else

#undef XLOG
#define XLOG(...)

#endif // DEBUG

// Limits the number of ANativeWindows that can be allocated for video playback.
// The limit is currently set to 2, the maximum number of simultaneous HW
// decodes that our OMX implementation allows. This forces the media producer
// to use its own SW decoders for subsequent video streams.
#define MAX_WINDOW_COUNT 2

namespace WebCore {

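// Keep a weak global JNI reference to the Java WebView so native code can
// post messages back to it (plugin draw requests, invalidates) without
// preventing the WebView from being garbage collected.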
MediaTexture::MediaTexture(jobject webViewRef) : android::LightRefBase<MediaTexture>()
{
    if (webViewRef) {
        JNIEnv* env = JSC::Bindings::getJNIEnv();
        m_weakWebViewRef = env->NewWeakGlobalRef(webViewRef);
    } else {
        m_weakWebViewRef = 0;
    }

    m_contentTexture = 0;
    m_isContentInverted = false;
    m_newWindowRequest = false;
}

MediaTexture::~MediaTexture()
{
    if (m_contentTexture)
        deleteTexture(m_contentTexture, true);
    for (unsigned int i = 0; i < m_videoTextures.size(); i++) {
        deleteTexture(m_videoTextures[i], true);
    }

    if (m_weakWebViewRef) {
        JNIEnv* env = JSC::Bindings::getJNIEnv();
        env->DeleteWeakGlobalRef(m_weakWebViewRef);
    }
}

bool MediaTexture::isContentInverted()
{
    android::Mutex::Autolock lock(m_mediaLock);
    return m_isContentInverted;
}

void MediaTexture::invertContents(bool invertContent)
{
    android::Mutex::Autolock lock(m_mediaLock);
    m_isContentInverted = invertContent;
}

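// Runs where the GL context is current. Deletes any textures queued for
// deferred deletion, lazily creates the content texture (notifying the
// WebKit thread that the plugin can draw), and fulfills a pending video
// window request before signaling the waiting thread.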
void MediaTexture::initNativeWindowIfNeeded()
{
    {
        android::Mutex::Autolock lock(m_mediaLock);

        // check to see if there are any unused textures to delete
        if (m_unusedTextures.size() != 0) {
            for (unsigned int i = 0; i < m_unusedTextures.size(); i++) {
                glDeleteTextures(1, &m_unusedTextures[i]);
            }
            m_unusedTextures.clear();
        }

        // create a content texture if none exists
        if (!m_contentTexture) {
            m_contentTexture = createTexture();

            // send a message to the WebKit thread to notify the plugin that it can draw
            if (m_weakWebViewRef) {
                JNIEnv* env = JSC::Bindings::getJNIEnv();
                jobject localWebViewRef = env->NewLocalRef(m_weakWebViewRef);
                if (localWebViewRef) {
                    jclass wvClass = env->GetObjectClass(localWebViewRef);
                    jmethodID sendPluginDrawMsg =
                            env->GetMethodID(wvClass, "sendPluginDrawMsg", "()V");
                    env->CallVoidMethod(localWebViewRef, sendPluginDrawMsg);
                    env->DeleteLocalRef(wvClass);
                    env->DeleteLocalRef(localWebViewRef);
                }
                checkException(env);
            }
        }

        // finally create a video texture if needed
        if (!m_newWindowRequest)
            return;

        // create the texture and add it to the list
        TextureWrapper* videoTexture = createTexture();
        m_videoTextures.append(videoTexture);

        // setup the state variables to signal the other thread
        m_newWindowRequest = false;
        m_newWindow = videoTexture->nativeWindow;
    }

    // signal the WebKit thread in case it is waiting
    m_newMediaRequestCond.signal();
}

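// Draws each video texture that has a frame available, then the plugin's
// content texture. Alpha blending is used for the content quad unless the
// current buffer's pixel format has no alpha channel.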
void MediaTexture::draw(const TransformationMatrix& contentMatrix,
          const TransformationMatrix& videoMatrix,
          const SkRect& mediaBounds)
{
    android::Mutex::Autolock lock(m_mediaLock);

    if (mediaBounds.isEmpty())
        return;

    // draw all the video textures first
    for (unsigned int i = 0; i < m_videoTextures.size(); i++) {

        TextureWrapper* video = m_videoTextures[i];

        if (!video->surfaceTexture.get() || video->dimensions.isEmpty()
                || !video->mediaListener->isFrameAvailable())
            continue;

        video->surfaceTexture->updateTexImage();

        float surfaceMatrix[16];
        video->surfaceTexture->getTransformMatrix(surfaceMatrix);

        SkRect dimensions = video->dimensions;
        dimensions.offset(mediaBounds.fLeft, mediaBounds.fTop);

#ifdef DEBUG
        if (!mediaBounds.contains(dimensions)) {
            XLOG("The video exceeds its parent's bounds.");
        }
#endif // DEBUG

        TilesManager::instance()->shader()->drawVideoLayerQuad(videoMatrix,
                surfaceMatrix, dimensions, video->textureId);
    }

    if (!m_contentTexture->mediaListener->isFrameAvailable())
        return;

    m_contentTexture->surfaceTexture->updateTexImage();

    sp<GraphicBuffer> buf = m_contentTexture->surfaceTexture->getCurrentBuffer();

    PixelFormat f = buf->getPixelFormat();
    // only attempt to use alpha blending if alpha channel exists
    bool forceAlphaBlending = !(
        PIXEL_FORMAT_RGBX_8888 == f ||
        PIXEL_FORMAT_RGB_888 == f ||
        PIXEL_FORMAT_RGB_565 == f ||
        PIXEL_FORMAT_RGB_332 == f);

    TilesManager::instance()->shader()->drawLayerQuad(contentMatrix,
                                                      mediaBounds,
                                                      m_contentTexture->textureId,
                                                      1.0f, forceAlphaBlending,
                                                      GL_TEXTURE_EXTERNAL_OES);
}

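// Called on the WebKit thread to obtain an ANativeWindow for a new video
// stream. Posts an invalidate to the UI thread, which creates the window in
// initNativeWindowIfNeeded(), then blocks until the request is fulfilled or
// the 0.5 second wait times out. Returns NULL once MAX_WINDOW_COUNT windows
// are already in use.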
ANativeWindow* MediaTexture::requestNativeWindowForVideo()
{
    android::Mutex::Autolock lock(m_mediaLock);

    // a previously requested window became ready after its request timed out,
    // so return it this time
    if (ANativeWindow* window = m_newWindow.get()) {
        m_newWindow.clear();
        return window;
    }

    // we only allow for so many textures, so return NULL if we exceed that limit
    else if (m_videoTextures.size() >= MAX_WINDOW_COUNT) {
        return 0;
    }

    m_newWindowRequest = true;

    // post an inval message to the UI thread to fulfill the request
    if (m_weakWebViewRef) {
        JNIEnv* env = JSC::Bindings::getJNIEnv();
        jobject localWebViewRef = env->NewLocalRef(m_weakWebViewRef);
        if (localWebViewRef) {
            jclass wvClass = env->GetObjectClass(localWebViewRef);
            jmethodID postInvalMethod = env->GetMethodID(wvClass, "postInvalidate", "()V");
            env->CallVoidMethod(localWebViewRef, postInvalMethod);
            env->DeleteLocalRef(wvClass);
            env->DeleteLocalRef(localWebViewRef);
        }
        checkException(env);
    }

    // block until the request can be fulfilled or we time out
    bool timedOut = false;
    while (m_newWindowRequest && !timedOut) {
        int ret = m_newMediaRequestCond.waitRelative(m_mediaLock, 500000000); // .5 sec
        timedOut = ret == TIMED_OUT;
    }

    // if the window is ready then return it otherwise return NULL
    if (ANativeWindow* window = m_newWindow.get()) {
        m_newWindow.clear();
        return window;
    }
    return 0;
}

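// Returns the window backing the plugin's content texture, or NULL if the
// content texture has not been created yet.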
ANativeWindow* MediaTexture::getNativeWindowForContent()
{
    android::Mutex::Autolock lock(m_mediaLock);
    if (m_contentTexture)
        return m_contentTexture->nativeWindow.get();
    else
        return 0;
}

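// Removes the video texture associated with the given window and recycles
// its resources via deleteTexture().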
void MediaTexture::releaseNativeWindow(const ANativeWindow* window)
{
    android::Mutex::Autolock lock(m_mediaLock);
    for (unsigned int i = 0; i < m_videoTextures.size(); i++) {
        if (m_videoTextures[i]->nativeWindow.get() == window) {
            deleteTexture(m_videoTextures[i]);
            m_videoTextures.remove(i);
            break;
        }
    }
}

void MediaTexture::setDimensions(const ANativeWindow* window,
                                 const SkRect& dimensions)
{
    android::Mutex::Autolock lock(m_mediaLock);
    for (unsigned int i = 0; i < m_videoTextures.size(); i++) {
        if (m_videoTextures[i]->nativeWindow.get() == window) {
            m_videoTextures[i]->dimensions = dimensions;
            break;
        }
    }
}

void MediaTexture::setFramerateCallback(const ANativeWindow* window,
                                        FramerateCallbackProc callback)
{
    XLOG("Set framerate callback for window %p", window);
    android::Mutex::Autolock lock(m_mediaLock);
    for (unsigned int i = 0; i < m_videoTextures.size(); i++) {
        if (m_videoTextures[i]->nativeWindow.get() == window) {
            m_videoTextures[i]->mediaListener->setFramerateCallback(callback);
            break;
        }
    }
}

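// Allocates a GL texture, wraps it in a SurfaceTexture/SurfaceTextureClient
// pair, and attaches a MediaListener to receive frame-available
// notifications.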
MediaTexture::TextureWrapper* MediaTexture::createTexture()
{
    TextureWrapper* wrapper = new TextureWrapper();

    // populate the wrapper
    glGenTextures(1, &wrapper->textureId);
    wrapper->surfaceTexture = new android::SurfaceTexture(wrapper->textureId);
    wrapper->nativeWindow = new android::SurfaceTextureClient(wrapper->surfaceTexture);
    wrapper->dimensions.setEmpty();

    // setup callback
    wrapper->mediaListener = new MediaListener(m_weakWebViewRef,
                                               wrapper->surfaceTexture,
                                               wrapper->nativeWindow);
    wrapper->surfaceTexture->setFrameAvailableListener(wrapper->mediaListener);

    return wrapper;
}

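// Tears down a TextureWrapper. When force is true the GL texture is deleted
// immediately; otherwise its id is queued in m_unusedTextures and freed on
// the next call to initNativeWindowIfNeeded().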
void MediaTexture::deleteTexture(TextureWrapper* texture, bool force)
{
    if (texture->surfaceTexture.get())
        texture->surfaceTexture->setFrameAvailableListener(0);

    if (force)
        glDeleteTextures(1, &texture->textureId);
    else
        m_unusedTextures.append(texture->textureId);

    // clear the strong pointer references
    texture->mediaListener.clear();
    texture->nativeWindow.clear();
    texture->surfaceTexture.clear();

    delete texture;
}

} // namespace WebCore

#endif // USE(ACCELERATED_COMPOSITING)