      1 /*
      2  * libjingle
      3  * Copyright 2014 Google Inc.
      4  *
      5  * Redistribution and use in source and binary forms, with or without
      6  * modification, are permitted provided that the following conditions are met:
      7  *
      8  *  1. Redistributions of source code must retain the above copyright notice,
      9  *     this list of conditions and the following disclaimer.
     10  *  2. Redistributions in binary form must reproduce the above copyright notice,
     11  *     this list of conditions and the following disclaimer in the documentation
     12  *     and/or other materials provided with the distribution.
     13  *  3. The name of the author may not be used to endorse or promote products
     14  *     derived from this software without specific prior written permission.
     15  *
     16  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
     17  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
     19  * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     20  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
     21  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
     22  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     23  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
     24  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
     25  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     26  */
     27 
     28 package org.webrtc;
     29 
     30 import android.graphics.SurfaceTexture;
     31 import android.media.MediaCodec;
     32 import android.media.MediaCodecInfo;
     33 import android.media.MediaCodecInfo.CodecCapabilities;
     34 import android.media.MediaCodecList;
     35 import android.media.MediaFormat;
     36 import android.os.Build;
     37 import android.os.SystemClock;
     38 import android.view.Surface;
     39 
     40 import org.webrtc.Logging;
     41 
     42 import java.nio.ByteBuffer;
     43 import java.util.Arrays;
     44 import java.util.LinkedList;
     45 import java.util.List;
     46 import java.util.concurrent.CountDownLatch;
     47 import java.util.Queue;
     48 import java.util.concurrent.TimeUnit;
     49 
     50 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
     51 // This class is an implementation detail of the Java PeerConnection API.
     52 @SuppressWarnings("deprecation")
     53 public class MediaCodecVideoDecoder {
     54   // This class is constructed, operated, and destroyed by its C++ incarnation,
     55   // so the class and its methods have non-public visibility.  The API this
     56   // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
      57   // possible to minimize the amount of translation work necessary.
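           //
           // A rough sketch of how the C++ side typically drives this class (illustrative only;
           // the exact call order and timing are determined by peerconnection_jni.cc, not by
           // any Java code):
           //
           //   initDecode(type, width, height, surfaceTextureHelper);
           //   while (decoding) {
           //     int index = dequeueInputBuffer();   // Fill inputBuffers[index] with encoded data.
           //     queueInputBuffer(index, size, presentationTimeStampUs, timeStampMs, ntpTimeStampMs);
           //     dequeueOutputBuffer(timeoutMs);     // ByteBuffer mode; then returnDecodedOutputBuffer().
           //     // ...or dequeueTextureBuffer(timeoutMs) when decoding to a Surface.
           //   }
           //   release();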
     58 
     59   private static final String TAG = "MediaCodecVideoDecoder";
     60 
     61   // Tracks webrtc::VideoCodecType.
     62   public enum VideoCodecType {
     63     VIDEO_CODEC_VP8,
     64     VIDEO_CODEC_VP9,
     65     VIDEO_CODEC_H264
     66   }
     67 
      68   private static final int DEQUEUE_INPUT_TIMEOUT = 500000;  // 500 ms timeout, in microseconds.
      69   private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec release.
     70   // Active running decoder instance. Set in initDecode() (called from native code)
      71   // and reset to null in release().
     72   private static MediaCodecVideoDecoder runningInstance = null;
     73   private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
     74   private static int codecErrors = 0;
     75 
     76   private Thread mediaCodecThread;
     77   private MediaCodec mediaCodec;
     78   private ByteBuffer[] inputBuffers;
     79   private ByteBuffer[] outputBuffers;
     80   private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
     81   private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
     82   private static final String H264_MIME_TYPE = "video/avc";
     83   // List of supported HW VP8 decoders.
     84   private static final String[] supportedVp8HwCodecPrefixes =
     85     {"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
     86   // List of supported HW VP9 decoders.
     87   private static final String[] supportedVp9HwCodecPrefixes =
     88     {"OMX.qcom.", "OMX.Exynos." };
     89   // List of supported HW H.264 decoders.
     90   private static final String[] supportedH264HwCodecPrefixes =
     91     {"OMX.qcom.", "OMX.Intel." };
     92   // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
     93   // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
     94   private static final int
     95     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
     96   // Allowable color formats supported by codec - in order of preference.
     97   private static final List<Integer> supportedColorList = Arrays.asList(
     98     CodecCapabilities.COLOR_FormatYUV420Planar,
     99     CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
    100     CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
    101     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
    102   private int colorFormat;
    103   private int width;
    104   private int height;
    105   private int stride;
    106   private int sliceHeight;
    107   private boolean hasDecodedFirstFrame;
    108   private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
    109   private boolean useSurface;
    110 
     111   // The variables below are only used when decoding to a Surface.
    112   private TextureListener textureListener;
    113   // Max number of output buffers queued before starting to drop decoded frames.
    114   private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
    115   private int droppedFrames;
    116   private Surface surface = null;
    117   private final Queue<DecodedOutputBuffer>
    118       dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
    119 
     120   // MediaCodec error handler - invoked when a critical error happens that may prevent
     121   // further use of the MediaCodec API. Currently this means that one of the MediaCodec
     122   // instances is hanging and can no longer be used.
    123   public static interface MediaCodecVideoDecoderErrorCallback {
    124     void onMediaCodecVideoDecoderCriticalError(int codecErrors);
    125   }
    126 
    127   public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
    128     Logging.d(TAG, "Set error callback");
    129     MediaCodecVideoDecoder.errorCallback = errorCallback;
    130   }
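
           // Example of how an application could register the error callback (illustrative only;
           // the tag and the reaction inside the handler are hypothetical):
           //
           //   MediaCodecVideoDecoder.setErrorCallback(new MediaCodecVideoDecoderErrorCallback() {
           //     @Override
           //     public void onMediaCodecVideoDecoderCriticalError(int codecErrors) {
           //       Logging.e("AppDecoderErrorHandler", "HW decoder is failing. Errors: " + codecErrors);
           //       // E.g. fall back to a software decoder or restart the call.
           //     }
           //   });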
    131 
     132   // Helper struct for findDecoder() below.
    133   private static class DecoderProperties {
    134     public DecoderProperties(String codecName, int colorFormat) {
    135       this.codecName = codecName;
    136       this.colorFormat = colorFormat;
    137     }
     138     public final String codecName; // OpenMax component name for the codec.
    139     public final int colorFormat;  // Color format supported by codec.
    140   }
    141 
    142   private static DecoderProperties findDecoder(
    143       String mime, String[] supportedCodecPrefixes) {
    144     if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
    145       return null; // MediaCodec.setParameters is missing.
    146     }
    147     Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
    148     for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
    149       MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
    150       if (info.isEncoder()) {
    151         continue;
    152       }
    153       String name = null;
    154       for (String mimeType : info.getSupportedTypes()) {
    155         if (mimeType.equals(mime)) {
    156           name = info.getName();
    157           break;
    158         }
    159       }
    160       if (name == null) {
    161         continue;  // No HW support in this codec; try the next one.
    162       }
    163       Logging.d(TAG, "Found candidate decoder " + name);
    164 
     165       // Check if this is a supported decoder.
    166       boolean supportedCodec = false;
    167       for (String codecPrefix : supportedCodecPrefixes) {
    168         if (name.startsWith(codecPrefix)) {
    169           supportedCodec = true;
    170           break;
    171         }
    172       }
    173       if (!supportedCodec) {
    174         continue;
    175       }
    176 
    177       // Check if codec supports either yuv420 or nv12.
    178       CodecCapabilities capabilities =
    179           info.getCapabilitiesForType(mime);
    180       for (int colorFormat : capabilities.colorFormats) {
    181         Logging.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
    182       }
    183       for (int supportedColorFormat : supportedColorList) {
    184         for (int codecColorFormat : capabilities.colorFormats) {
    185           if (codecColorFormat == supportedColorFormat) {
    186             // Found supported HW decoder.
    187             Logging.d(TAG, "Found target decoder " + name +
    188                 ". Color: 0x" + Integer.toHexString(codecColorFormat));
    189             return new DecoderProperties(name, codecColorFormat);
    190           }
    191         }
    192       }
    193     }
    194     Logging.d(TAG, "No HW decoder found for mime " + mime);
    195     return null;  // No HW decoder.
    196   }
    197 
    198   public static boolean isVp8HwSupported() {
    199     return findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
    200   }
    201 
    202   public static boolean isVp9HwSupported() {
    203     return findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null;
    204   }
    205 
    206   public static boolean isH264HwSupported() {
    207     return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
    208   }
    209 
    210   public static void printStackTrace() {
    211     if (runningInstance != null && runningInstance.mediaCodecThread != null) {
    212       StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
    213       if (mediaCodecStackTraces.length > 0) {
     214         Logging.d(TAG, "MediaCodecVideoDecoder stack trace:");
    215         for (StackTraceElement stackTrace : mediaCodecStackTraces) {
    216           Logging.d(TAG, stackTrace.toString());
    217         }
    218       }
    219     }
    220   }
    221 
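           // All calls into |mediaCodec| are expected to happen on the thread that called
           // initDecode(); this helper enforces that.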
    222   private void checkOnMediaCodecThread() throws IllegalStateException {
    223     if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
    224       throw new IllegalStateException(
    225           "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
    226           " but is now called on " + Thread.currentThread());
    227     }
    228   }
    229 
    230   // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
    231   private boolean initDecode(
    232       VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
    233     if (mediaCodecThread != null) {
    234       throw new RuntimeException("Forgot to release()?");
    235     }
    236     useSurface = (surfaceTextureHelper != null);
    237     String mime = null;
    238     String[] supportedCodecPrefixes = null;
    239     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
    240       mime = VP8_MIME_TYPE;
    241       supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
    242     } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
    243       mime = VP9_MIME_TYPE;
    244       supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
    245     } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
    246       mime = H264_MIME_TYPE;
    247       supportedCodecPrefixes = supportedH264HwCodecPrefixes;
    248     } else {
     249       throw new RuntimeException("Unsupported codec " + type);
    250     }
    251     DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
    252     if (properties == null) {
    253       throw new RuntimeException("Cannot find HW decoder for " + type);
    254     }
    255     Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
    256         ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
    257         ". Use Surface: " + useSurface);
    258     runningInstance = this; // Decoder is now running and can be queried for stack traces.
    259     mediaCodecThread = Thread.currentThread();
    260     try {
    261       this.width = width;
    262       this.height = height;
    263       stride = width;
    264       sliceHeight = height;
    265 
    266       if (useSurface) {
    267         textureListener = new TextureListener(surfaceTextureHelper);
    268         surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
    269       }
    270 
    271       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
    272       if (!useSurface) {
    273         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
    274       }
    275       Logging.d(TAG, "  Format: " + format);
    276       mediaCodec =
    277           MediaCodecVideoEncoder.createByCodecName(properties.codecName);
    278       if (mediaCodec == null) {
    279         Logging.e(TAG, "Can not create media decoder");
    280         return false;
    281       }
    282       mediaCodec.configure(format, surface, null, 0);
    283       mediaCodec.start();
    284       colorFormat = properties.colorFormat;
    285       outputBuffers = mediaCodec.getOutputBuffers();
    286       inputBuffers = mediaCodec.getInputBuffers();
    287       decodeStartTimeMs.clear();
    288       hasDecodedFirstFrame = false;
    289       dequeuedSurfaceOutputBuffers.clear();
    290       droppedFrames = 0;
    291       Logging.d(TAG, "Input buffers: " + inputBuffers.length +
    292           ". Output buffers: " + outputBuffers.length);
    293       return true;
    294     } catch (IllegalStateException e) {
    295       Logging.e(TAG, "initDecode failed", e);
    296       return false;
    297     }
    298   }
    299 
    300   private void release() {
    301     Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
    302     checkOnMediaCodecThread();
    303 
     304     // Run MediaCodec stop() and release() on a separate thread since
     305     // MediaCodec.stop() sometimes hangs.
    306     final CountDownLatch releaseDone = new CountDownLatch(1);
    307 
    308     Runnable runMediaCodecRelease = new Runnable() {
    309       @Override
    310       public void run() {
    311         try {
    312           Logging.d(TAG, "Java releaseDecoder on release thread");
    313           mediaCodec.stop();
    314           mediaCodec.release();
    315           Logging.d(TAG, "Java releaseDecoder on release thread done");
    316         } catch (Exception e) {
    317           Logging.e(TAG, "Media decoder release failed", e);
    318         }
    319         releaseDone.countDown();
    320       }
    321     };
    322     new Thread(runMediaCodecRelease).start();
    323 
    324     if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
    325       Logging.e(TAG, "Media decoder release timeout");
    326       codecErrors++;
    327       if (errorCallback != null) {
    328         Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
    329         errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
    330       }
    331     }
    332 
    333     mediaCodec = null;
    334     mediaCodecThread = null;
    335     runningInstance = null;
    336     if (useSurface) {
    337       surface.release();
    338       surface = null;
    339       textureListener.release();
    340     }
    341     Logging.d(TAG, "Java releaseDecoder done");
    342   }
    343 
    344   // Dequeue an input buffer and return its index, -1 if no input buffer is
    345   // available, or -2 if the codec is no longer operative.
    346   private int dequeueInputBuffer() {
    347     checkOnMediaCodecThread();
    348     try {
    349       return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
    350     } catch (IllegalStateException e) {
     351       Logging.e(TAG, "dequeueInputBuffer failed", e);
    352       return -2;
    353     }
    354   }
    355 
     356   private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
    357       long timeStampMs, long ntpTimeStamp) {
    358     checkOnMediaCodecThread();
    359     try {
    360       inputBuffers[inputBufferIndex].position(0);
    361       inputBuffers[inputBufferIndex].limit(size);
    362       decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
    363           ntpTimeStamp));
     364       mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
    365       return true;
    366     }
    367     catch (IllegalStateException e) {
    368       Logging.e(TAG, "decode failed", e);
    369       return false;
    370     }
    371   }
    372 
    373   private static class TimeStamps {
    374     public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
    375       this.decodeStartTimeMs = decodeStartTimeMs;
    376       this.timeStampMs = timeStampMs;
    377       this.ntpTimeStampMs = ntpTimeStampMs;
    378     }
    379     private final long decodeStartTimeMs; // Time when this frame was queued for decoding.
     380     private final long timeStampMs; // Stored only for bookkeeping in Java; the value is used in C++.
     381     private final long ntpTimeStampMs; // Stored only for bookkeeping in Java; the value is used in C++.
    382   }
    383 
    384   // Helper struct for dequeueOutputBuffer() below.
    385   private static class DecodedOutputBuffer {
    386     public DecodedOutputBuffer(int index, int offset, int size, long timeStampMs,
    387         long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
    388       this.index = index;
    389       this.offset = offset;
    390       this.size = size;
    391       this.timeStampMs = timeStampMs;
    392       this.ntpTimeStampMs = ntpTimeStampMs;
    393       this.decodeTimeMs = decodeTime;
    394       this.endDecodeTimeMs = endDecodeTime;
    395     }
    396 
    397     private final int index;
    398     private final int offset;
    399     private final int size;
    400     private final long timeStampMs;
    401     private final long ntpTimeStampMs;
    402     // Number of ms it took to decode this frame.
    403     private final long decodeTimeMs;
    404     // System time when this frame finished decoding.
    405     private final long endDecodeTimeMs;
    406   }
    407 
    408   // Helper struct for dequeueTextureBuffer() below.
    409   private static class DecodedTextureBuffer {
    410     private final int textureID;
    411     private final float[] transformMatrix;
    412     private final long timeStampMs;
    413     private final long ntpTimeStampMs;
    414     private final long decodeTimeMs;
     415     // Interval from when the frame finished decoding until this buffer was created.
     416     // Since there is only one texture, this interval depends on the time from when
     417     // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
     418     // so that the texture can be updated with the next decoded frame.
    419     private final long frameDelayMs;
    420 
    421     // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
    422     // that was dropped.
    423     public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timeStampMs,
    424         long ntpTimeStampMs, long decodeTimeMs, long frameDelay) {
    425       this.textureID = textureID;
    426       this.transformMatrix = transformMatrix;
    427       this.timeStampMs = timeStampMs;
    428       this.ntpTimeStampMs = ntpTimeStampMs;
    429       this.decodeTimeMs = decodeTimeMs;
    430       this.frameDelayMs = frameDelay;
    431     }
    432   }
    433 
    434   // Poll based texture listener.
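           // The flow implemented below is, roughly:
           //   1. The decoder thread calls addBufferToRender() and then releases the corresponding
           //      MediaCodec output buffer with render == true, which queues the frame for
           //      rendering to the SurfaceTexture.
           //   2. onTextureFrameAvailable() fires on the SurfaceTextureHelper thread and wraps the
           //      pending DecodedOutputBuffer into a DecodedTextureBuffer.
           //   3. The decoder thread polls dequeueTextureBuffer() to pick up the result.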
    435   private static class TextureListener
    436       implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
    437     private final SurfaceTextureHelper surfaceTextureHelper;
    438     // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
    439     private final Object newFrameLock = new Object();
     440     // |bufferToRender| is non-null while waiting for the transition from addBufferToRender()
     441     // to onTextureFrameAvailable().
    442     private DecodedOutputBuffer bufferToRender;
    443     private DecodedTextureBuffer renderedBuffer;
    444 
    445     public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
    446       this.surfaceTextureHelper = surfaceTextureHelper;
    447       surfaceTextureHelper.setListener(this);
    448     }
    449 
    450     public void addBufferToRender(DecodedOutputBuffer buffer) {
    451       if (bufferToRender != null) {
    452         Logging.e(TAG,
    453             "Unexpected addBufferToRender() called while waiting for a texture.");
    454         throw new IllegalStateException("Waiting for a texture.");
    455       }
    456       bufferToRender = buffer;
    457     }
    458 
    459     public boolean isWaitingForTexture() {
    460       synchronized (newFrameLock) {
    461         return bufferToRender != null;
    462       }
    463     }
    464 
    465     // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
    466     @Override
    467     public void onTextureFrameAvailable(
    468         int oesTextureId, float[] transformMatrix, long timestampNs) {
    469       synchronized (newFrameLock) {
    470         if (renderedBuffer != null) {
    471           Logging.e(TAG,
    472               "Unexpected onTextureFrameAvailable() called while already holding a texture.");
    473           throw new IllegalStateException("Already holding a texture.");
    474         }
    475         // |timestampNs| is always zero on some Android versions.
    476         renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
    477             bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
    478             SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
    479         bufferToRender = null;
    480         newFrameLock.notifyAll();
    481       }
    482     }
    483 
    484     // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
    485     public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
    486       synchronized (newFrameLock) {
    487         if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
    488           try {
    489             newFrameLock.wait(timeoutMs);
     490           } catch (InterruptedException e) {
    491             // Restore the interrupted status by reinterrupting the thread.
    492             Thread.currentThread().interrupt();
    493           }
    494         }
    495         DecodedTextureBuffer returnedBuffer = renderedBuffer;
    496         renderedBuffer = null;
    497         return returnedBuffer;
    498       }
    499     }
    500 
    501     public void release() {
    502       // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
    503       // progress is done. Therefore, the call to disconnect() must be outside any synchronized
    504       // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
    505       surfaceTextureHelper.disconnect();
    506       synchronized (newFrameLock) {
    507         if (renderedBuffer != null) {
    508           surfaceTextureHelper.returnTextureFrame();
    509           renderedBuffer = null;
    510         }
    511       }
    512     }
    513   }
    514 
     515   // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
    516   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
    517   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
    518   // upon codec error.
    519   private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
    520     checkOnMediaCodecThread();
    521     if (decodeStartTimeMs.isEmpty()) {
    522       return null;
    523     }
    524     // Drain the decoder until receiving a decoded buffer or hitting
    525     // MediaCodec.INFO_TRY_AGAIN_LATER.
    526     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    527     while (true) {
    528       final int result = mediaCodec.dequeueOutputBuffer(
    529           info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
    530       switch (result) {
    531         case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
    532           outputBuffers = mediaCodec.getOutputBuffers();
    533           Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
    534           if (hasDecodedFirstFrame) {
    535             throw new RuntimeException("Unexpected output buffer change event.");
    536           }
    537           break;
    538         case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
    539           MediaFormat format = mediaCodec.getOutputFormat();
    540           Logging.d(TAG, "Decoder format changed: " + format.toString());
     541           int newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
     542           int newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
     543           if (hasDecodedFirstFrame && (newWidth != width || newHeight != height)) {
     544             throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
     545                 height + ". New " + newWidth + "*" + newHeight);
     546           }
     547           width = newWidth;
     548           height = newHeight;
    549 
    550           if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
    551             colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
    552             Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
    553             if (!supportedColorList.contains(colorFormat)) {
     554               throw new IllegalStateException("Unsupported color format: " + colorFormat);
    555             }
    556           }
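                   // "stride" and "slice-height" describe the row pitch and plane height of the
                   // decoded buffers and may be larger than the visible width/height; the Math.max()
                   // calls below only ensure they are never smaller than width/height.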
    557           if (format.containsKey("stride")) {
    558             stride = format.getInteger("stride");
    559           }
    560           if (format.containsKey("slice-height")) {
    561             sliceHeight = format.getInteger("slice-height");
    562           }
    563           Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
    564           stride = Math.max(width, stride);
    565           sliceHeight = Math.max(height, sliceHeight);
    566           break;
    567         case MediaCodec.INFO_TRY_AGAIN_LATER:
    568           return null;
    569         default:
    570           hasDecodedFirstFrame = true;
    571           TimeStamps timeStamps = decodeStartTimeMs.remove();
    572           return new DecodedOutputBuffer(result, info.offset, info.size, timeStamps.timeStampMs,
    573               timeStamps.ntpTimeStampMs,
    574               SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs,
    575               SystemClock.elapsedRealtime());
     576       }
    577     }
    578   }
    579 
    580   // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
    581   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
    582   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
    583   // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
    584   // a frame can't be returned.
    585   private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
    586     checkOnMediaCodecThread();
    587     if (!useSurface) {
    588       throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
    589     }
    590     DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
    591     if (outputBuffer != null) {
    592       dequeuedSurfaceOutputBuffers.add(outputBuffer);
    593     }
    594 
     595     maybeRenderDecodedTextureBuffer();
     596     // Check if a texture is ready now by waiting at most |dequeueTimeoutMs|.
    597     DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
    598     if (renderedBuffer != null) {
     599       maybeRenderDecodedTextureBuffer();
    600       return renderedBuffer;
    601     }
    602 
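             // Drop the oldest decoded frame if too many output buffers are queued up waiting for
             // the single output texture, or if the caller is draining (|dequeueTimeoutMs| > 0) and
             // no texture became ready within the timeout.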
    603     if ((dequeuedSurfaceOutputBuffers.size()
    604          >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
    605          || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
    606       ++droppedFrames;
     607       // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. The very oldest frame
     608       // is owned by |textureListener| and can't be dropped, since
     609       // mediaCodec.releaseOutputBuffer() has already been called for it.
    610       final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
    611       if (dequeueTimeoutMs > 0) {
    612         // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
    613         // return the one and only texture even if it does not render.
    614         // Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
    615         //    + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames);
    616       } else {
    617         Logging.w(TAG, "Too many output buffers. Dropping frame with TS: "
    618             + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames);
    619       }
    620 
    621       mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
    622       return new DecodedTextureBuffer(0, null, droppedFrame.timeStampMs,
    623           droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
    624           SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
    625     }
    626     return null;
    627   }
    628 
     629   private void maybeRenderDecodedTextureBuffer() {
    630     if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
    631       return;
    632     }
    633     // Get the first frame in the queue and render to the decoder output surface.
    634     final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
    635     textureListener.addBufferToRender(buffer);
    636     mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
    637   }
    638 
    639   // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
    640   // non-surface decoding.
    641   // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
    642   // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
    643   // MediaCodec.CodecException upon codec error.
    644   private void returnDecodedOutputBuffer(int index)
    645       throws IllegalStateException, MediaCodec.CodecException {
    646     checkOnMediaCodecThread();
    647     if (useSurface) {
    648       throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
    649     }
    650     mediaCodec.releaseOutputBuffer(index, false /* render */);
    651   }
    652 }
    653