/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.test.AndroidTestCase;
import android.util.Log;

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

import javax.microedition.khronos.opengles.GL10;


/**
 * Generates a series of video frames, encodes them, decodes them, and tests for significant
 * divergence from the original.
 * <p>
 * We copy the data from the encoder's output buffers to the decoder's input buffers, running
 * them in parallel.  The first buffer output for video/avc contains codec configuration data,
 * which we must carefully forward to the decoder.
 * <p>
 * An alternative approach would be to save the output of the encoder as an mpeg4 video
 * file, and read it back in from disk.  The data we're generating is just an elementary
 * stream, so we'd need to perform additional steps to make that happen.
 */
public class EncodeDecodeTest extends AndroidTestCase {
    private static final String TAG = "EncodeDecodeTest";
    private static final boolean VERBOSE = false;           // lots of logging
    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/test.";

    // parameters for the encoder
    private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 15;               // 15fps
    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames

    // movie length, in frames
    private static final int NUM_FRAMES = 30;               // two seconds of video

    private static final int TEST_Y = 120;                  // YUV values for colored rect
    private static final int TEST_U = 160;
    private static final int TEST_V = 200;
    private static final int TEST_R0 = 0;                   // RGB equivalent of YUV {0,0,0}
    private static final int TEST_G0 = 136;
    private static final int TEST_B0 = 0;
    private static final int TEST_R1 = 236;                 // RGB equivalent of YUV {120,160,200}
    private static final int TEST_G1 = 50;
    private static final int TEST_B1 = 186;
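    // These expected RGB values appear to assume the BT.601 limited-range YUV-to-RGB
    // transform; for example, R1 = 1.164*(120-16) + 1.596*(200-128) = 235.97, which
    // rounds to 236.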

    // size of a frame, in pixels
    private int mWidth = -1;
    private int mHeight = -1;
    // bit rate, in bits per second
    private int mBitRate = -1;

    // largest color component delta seen (i.e. actual vs. expected)
    private int mLargestColorDelta;


    /**
     * Tests streaming of AVC video through the encoder and decoder.  Data is encoded from
     * a series of byte[] buffers and decoded into ByteBuffers.  The output is checked for
     * validity.
     */
    public void testEncodeDecodeVideoFromBufferToBufferQCIF() throws Exception {
        setParameters(176, 144, 1000000);
        encodeDecodeVideoFromBuffer(false);
    }
    public void testEncodeDecodeVideoFromBufferToBufferQVGA() throws Exception {
        setParameters(320, 240, 2000000);
        encodeDecodeVideoFromBuffer(false);
    }
    public void testEncodeDecodeVideoFromBufferToBuffer720p() throws Exception {
        setParameters(1280, 720, 6000000);
        encodeDecodeVideoFromBuffer(false);
    }

    /**
     * Tests streaming of AVC video through the encoder and decoder.  Data is encoded from
     * a series of byte[] buffers and decoded into Surfaces.  The output is checked for
     * validity.
     * <p>
     * Because of the way SurfaceTexture.OnFrameAvailableListener works, we need to run this
     * test on a thread that doesn't have a Looper configured.  If we don't, the test will
     * pass, but we won't actually test the output because we'll never receive the "frame
     * available" notifications.  The CTS test framework seems to be configuring a Looper on
     * the test thread, so we have to hand control off to a new thread for the duration of
     * the test.
     */
    public void testEncodeDecodeVideoFromBufferToSurfaceQCIF() throws Throwable {
        setParameters(176, 144, 1000000);
        BufferToSurfaceWrapper.runTest(this);
    }
    public void testEncodeDecodeVideoFromBufferToSurfaceQVGA() throws Throwable {
        setParameters(320, 240, 2000000);
        BufferToSurfaceWrapper.runTest(this);
    }
    public void testEncodeDecodeVideoFromBufferToSurface720p() throws Throwable {
        setParameters(1280, 720, 6000000);
        BufferToSurfaceWrapper.runTest(this);
    }

    /** Wraps encodeDecodeVideoFromBuffer(true) */
    private static class BufferToSurfaceWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeDecodeTest mTest;

        private BufferToSurfaceWrapper(EncodeDecodeTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.encodeDecodeVideoFromBuffer(true);
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /**
         * Entry point.
         */
        public static void runTest(EncodeDecodeTest obj) throws Throwable {
            BufferToSurfaceWrapper wrapper = new BufferToSurfaceWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Tests streaming of AVC video through the encoder and decoder.  Data is provided through
     * a Surface and decoded onto a Surface.  The output is checked for validity.
     */
    public void testEncodeDecodeVideoFromSurfaceToSurfaceQCIF() throws Throwable {
        setParameters(176, 144, 1000000);
        SurfaceToSurfaceWrapper.runTest(this);
    }
    public void testEncodeDecodeVideoFromSurfaceToSurfaceQVGA() throws Throwable {
        setParameters(320, 240, 2000000);
        SurfaceToSurfaceWrapper.runTest(this);
    }
    public void testEncodeDecodeVideoFromSurfaceToSurface720p() throws Throwable {
        setParameters(1280, 720, 6000000);
        SurfaceToSurfaceWrapper.runTest(this);
    }

    /** Wraps encodeDecodeVideoFromSurfaceToSurface() */
    private static class SurfaceToSurfaceWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeDecodeTest mTest;

        private SurfaceToSurfaceWrapper(EncodeDecodeTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.encodeDecodeVideoFromSurfaceToSurface();
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /**
         * Entry point.
         */
        public static void runTest(EncodeDecodeTest obj) throws Throwable {
            SurfaceToSurfaceWrapper wrapper = new SurfaceToSurfaceWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Sets the desired frame size and bit rate.
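     * <p>
     * For example, setParameters(176, 144, 1000000) selects QCIF at 1 Mbps, matching
     * the QCIF test cases above.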
     */
    private void setParameters(int width, int height, int bitRate) {
        if ((width % 16) != 0 || (height % 16) != 0) {
            Log.w(TAG, "WARNING: width or height not multiple of 16");
        }
        mWidth = width;
        mHeight = height;
        mBitRate = bitRate;
    }

    /**
     * Tests encoding and subsequently decoding video from frames generated into a buffer.
     * <p>
     * We encode several frames of a video test pattern using MediaCodec, then decode the
     * output with MediaCodec and do some simple checks.
     * <p>
     * See http://b.android.com/37769 for a discussion of input format pitfalls.
     */
    private void encodeDecodeVideoFromBuffer(boolean toSurface) throws Exception {
        MediaCodec encoder = null;
        MediaCodec decoder = null;

        mLargestColorDelta = -1;

        try {
            MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
            if (codecInfo == null) {
                // Don't fail CTS if they don't have an AVC codec (not here, anyway).
                Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
                return;
            }
            if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

            int colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
            if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat);

            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            if (VERBOSE) Log.d(TAG, "format: " + format);

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.
            encoder = MediaCodec.createByCodecName(codecInfo.getName());
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encoder.start();

            // Create a MediaCodec for the decoder, just based on the MIME type.  The various
            // format details will be passed through the csd-0 meta-data later on.
            decoder = MediaCodec.createDecoderByType(MIME_TYPE);

            doEncodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface);
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs");
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Tests encoding and subsequently decoding video from frames generated onto the
     * encoder's input Surface.
     * <p>
     * We encode several frames of a video test pattern using MediaCodec, then decode the
     * output with MediaCodec and do some simple checks.
     */
    private void encodeDecodeVideoFromSurfaceToSurface() throws Exception {
        MediaCodec encoder = null;
        MediaCodec decoder = null;
        InputSurface inputSurface = null;
        OutputSurface outputSurface = null;

        mLargestColorDelta = -1;

        try {
            MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
            if (codecInfo == null) {
                // Don't fail CTS if they don't have an AVC codec (not here, anyway).
                Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
                return;
            }
            if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

            int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;

            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            if (VERBOSE) Log.d(TAG, "format: " + format);

            // Create the output surface.
            outputSurface = new OutputSurface(mWidth, mHeight);

            // Create a MediaCodec for the decoder, just based on the MIME type.  The various
            // format details will be passed through the csd-0 meta-data later on.
            decoder = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
            decoder.configure(decoderFormat, outputSurface.getSurface(), null, 0);
            decoder.start();

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.  Request a Surface to use for input.
            encoder = MediaCodec.createByCodecName(codecInfo.getName());
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            inputSurface = new InputSurface(encoder.createInputSurface());
            encoder.start();

            doEncodeDecodeVideoFromSurfaceToSurface(encoder, inputSurface, decoder, outputSurface);
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs");
            if (inputSurface != null) {
                inputSurface.release();
            }
            if (outputSurface != null) {
                outputSurface.release();
            }
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Returns the first codec capable of encoding the specified MIME type, or null if no
     * match was found.
     */
    private static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

            if (!codecInfo.isEncoder()) {
                continue;
            }

            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }
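
    // On API 21+ the platform can perform this search itself.  A minimal sketch, not used
    // by this test (note that on some releases the format passed to findEncoderForFormat
    // must not contain a frame rate):
    //
    //   MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    //   String name = list.findEncoderForFormat(
    //           MediaFormat.createVideoFormat(MIME_TYPE, 176, 144));
    //   MediaCodec encoder = (name != null) ? MediaCodec.createByCodecName(name) : null;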

    /**
     * Returns a color format that is supported by the codec and by this test code.  If no
     * match is found, this throws a test failure -- the set of formats known to the test
     * should be expanded for new platforms.
     */
    private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }
        fail("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        return 0;   // not reached
    }

    /**
     * Returns true if this is a color format that this test code understands (i.e. we know how
     * to read and generate frames in this format).
     */
    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            // these are the formats we know how to handle for this test
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    /**
     * Returns true if the specified color format is semi-planar YUV.  Throws an exception
     * if the color format is not recognized (e.g. not YUV).
     */
    private static boolean isSemiPlanarYUV(int colorFormat) {
        switch (colorFormat) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                return false;
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                throw new RuntimeException("unknown format " + colorFormat);
        }
    }
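
    /**
     * Illustrative sketch, not used by the test: computes the byte offset of the U sample
     * for pixel (x,y) in a tightly-packed width x height YUV420 buffer (i.e. assuming
     * stride == width and sliceHeight == height), mirroring the indexing used by
     * generateFrame and checkFrame.  In semi-planar layouts V follows immediately at the
     * returned offset + 1; in planar layouts the V plane starts (width/2)*(height/2)
     * bytes after the U plane.
     */
    private static int sampleUOffset(int colorFormat, int width, int height, int x, int y) {
        if (isSemiPlanarYUV(colorFormat)) {
            // Y plane, then interleaved UV rows at half vertical resolution.
            return width * height + (y / 2) * width + (x & ~1);
        } else {
            // Y plane, then a quarter-size U plane.
            return width * height + (y / 2) * (width / 2) + (x / 2);
        }
    }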

    /**
     * Does the actual work for encoding frames from byte[] buffers and decoding them,
     * either to ByteBuffers or, if toSurface is set, to a Surface.
     */
    private void doEncodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat,
            MediaCodec decoder, boolean toSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = null;
        ByteBuffer[] decoderOutputBuffers = null;
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        MediaFormat decoderOutputFormat = null;
        int generateIndex = 0;
        int checkIndex = 0;
        int badFrames = 0;
        boolean decoderConfigured = false;
        OutputSurface outputSurface = null;

        // The size of a frame of video data, in the formats we handle, is stride*sliceHeight
        // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels.  Application
        // of algebra and assuming that stride==width and sliceHeight==height yields:
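        // For example, at QCIF (176x144) that is 176*144 = 25344 bytes of Y plus
        // 88*72 = 6336 bytes each of Cb and Cr: 38016 bytes total, i.e. 25344 * 3 / 2.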
        byte[] frameData = new byte[mWidth * mHeight * 3 / 2];

        // Just out of curiosity.
        long rawSize = 0;
        long encodedSize = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".h264";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }
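
        // To produce a playable .mp4 instead, the encoded buffers could be fed to a
        // MediaMuxer (API 18).  A minimal sketch, assuming "path" names the output file
        // and the track format is taken from the encoder after INFO_OUTPUT_FORMAT_CHANGED:
        //
        //   MediaMuxer muxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        //   int track = muxer.addTrack(encoder.getOutputFormat());
        //   muxer.start();
        //   ... call muxer.writeSampleData(track, encodedData, info) for each output buffer ...
        //   muxer.stop();
        //   muxer.release();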

        if (toSurface) {
            outputSurface = new OutputSurface(mWidth, mHeight);
        }

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // If we're not done submitting frames, generate a new one and submit it.  By
            // doing this on every loop we're working to ensure that the encoder always has
            // work to do.
            //
            // We don't really want a timeout here, but sometimes there's a delay opening
            // the encoder device, so a short timeout can keep us from spinning hard.
            if (!inputDone) {
                int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (VERBOSE) Log.d(TAG, "inputBufIndex=" + inputBufIndex);
                if (inputBufIndex >= 0) {
                    long ptsUsec = computePresentationTime(generateIndex);
                    if (generateIndex == NUM_FRAMES) {
                        // Send an empty frame with the end-of-stream flag set.  If we set EOS
                        // on a frame with data, that frame data will be ignored, and the
                        // output will be short one frame.
                        encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                    } else {
                        generateFrame(generateIndex, encoderColorFormat, frameData);

                        ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex];
                        // the buffer should be sized to hold one full frame
                        assertTrue(inputBuf.capacity() >= frameData.length);
                        inputBuf.clear();
                        inputBuf.put(frameData);

                        encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0);
                        if (VERBOSE) Log.d(TAG, "submitted frame " + generateIndex + " to enc");
                    }
                    generateIndex++;
                } else {
                    // either all in use, or we timed out during initial setup
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            // Check for output from the encoder.  If there's no output yet, we either need to
            // provide more input, or we need to wait for the encoder to work its magic.  We
            // can't actually tell which is the case, so if we can't get an output buffer right
            // away we loop around and see if it wants more input.
            //
            // Once we get EOS from the encoder, we don't need to do this anymore.
            if (!encoderDone) {
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // not expected for an encoder
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);

                    encodedSize += info.size;
                    if (outputStream != null) {
                        byte[] data = new byte[info.size];
                        encodedData.get(data);
                        encodedData.position(info.offset);
                        try {
                            outputStream.write(data);
                        } catch (IOException ioe) {
                            Log.w(TAG, "failed writing debug data to file");
                            throw new RuntimeException(ioe);
                        }
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        // Codec config info.  Only expected on first packet.  One way to
                        // handle this is to manually stuff the data into the MediaFormat
                        // and pass that to configure().  We do that here to exercise the API.
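                        // (The alternative, exercised by the surface-to-surface test below,
                        // is to queue this buffer to the decoder with the
                        // BUFFER_FLAG_CODEC_CONFIG flag set.)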
                        assertFalse(decoderConfigured);
                        MediaFormat format =
                                MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
                        format.setByteBuffer("csd-0", encodedData);
                        decoder.configure(format, toSurface ? outputSurface.getSurface() : null,
                                null, 0);
                        decoder.start();
                        decoderInputBuffers = decoder.getInputBuffers();
                        decoderOutputBuffers = decoder.getOutputBuffers();
                        decoderConfigured = true;
                        if (VERBOSE) Log.d(TAG, "decoder configured (" + info.size + " bytes)");
                    } else {
                        // Get a decoder input buffer, blocking until it's available.
                        assertTrue(decoderConfigured);
                        int inputBufIndex = decoder.dequeueInputBuffer(-1);
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputBuf.put(encodedData);
                        decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                info.presentationTimeUs, info.flags);

                        encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                        if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                + (encoderDone ? " (EOS)" : ""));
                    }

                    encoder.releaseOutputBuffer(encoderStatus, false);
                }
            }

            // Check for output from the decoder.  We want to do this on every loop to avoid
            // the possibility of stalling the pipeline.  We use a short timeout to avoid
            // burning CPU if the decoder is hard at work but the next frame isn't quite ready.
            //
            // If we're decoding to a Surface, we'll get notified here as usual but the
            // ByteBuffer references will be null.  The data is sent to the Surface instead.
            if (decoderConfigured) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The storage associated with the direct ByteBuffer may already be unmapped,
                    // so attempting to access data through the old output buffer array could
                    // lead to a native crash.
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                    decoderOutputBuffers = decoder.getOutputBuffers();
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else {  // decoderStatus >= 0
                    if (!toSurface) {
                        ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];

                        outputFrame.position(info.offset);
                        outputFrame.limit(info.offset + info.size);

                        rawSize += info.size;
                        if (info.size == 0) {
                            if (VERBOSE) Log.d(TAG, "got empty frame");
                        } else {
                            if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex);
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                    info.presentationTimeUs);
                            if (!checkFrame(checkIndex++, decoderOutputFormat, outputFrame)) {
                                badFrames++;
                            }
                        }

                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (VERBOSE) Log.d(TAG, "output EOS");
                            outputDone = true;
                        }
                        decoder.releaseOutputBuffer(decoderStatus, false /*render*/);
                    } else {
                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                                " (size=" + info.size + ")");
                        rawSize += info.size;
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (VERBOSE) Log.d(TAG, "output EOS");
                            outputDone = true;
                        }

                        boolean doRender = (info.size != 0);

                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                        // that the texture will be available before the call returns, so we
                        // need to wait for the onFrameAvailable callback to fire.
                        decoder.releaseOutputBuffer(decoderStatus, doRender);
                        if (doRender) {
                            if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                    info.presentationTimeUs);
                            outputSurface.awaitNewImage();
                            outputSurface.drawImage();
                            if (!checkSurfaceFrame(checkIndex++)) {
                                badFrames++;
                            }
                        }
                    }
                }
            }
        }

        if (VERBOSE) Log.d(TAG, "decoded " + checkIndex + " frames at "
                + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize);
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (outputSurface != null) {
            outputSurface.release();
        }

        if (checkIndex != NUM_FRAMES) {
            fail("expected " + NUM_FRAMES + " frames, only decoded " + checkIndex);
        }
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    }

    /**
     * Does the actual work for encoding and decoding from Surface to Surface.
     */
    private void doEncodeDecodeVideoFromSurfaceToSurface(MediaCodec encoder,
            InputSurface inputSurface, MediaCodec decoder,
            OutputSurface outputSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int generateIndex = 0;
        int checkIndex = 0;
        int badFrames = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".h264";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // If we're not done submitting frames, generate a new one and submit it.  The
            // eglSwapBuffers call will block if the input is full.
            if (!inputDone) {
                if (generateIndex == NUM_FRAMES) {
                    // Send an empty frame with the end-of-stream flag set.
                    if (VERBOSE) Log.d(TAG, "signaling input EOS");
                    encoder.signalEndOfInputStream();
                    inputDone = true;
                } else {
                    inputSurface.makeCurrent();
                    generateSurfaceFrame(generateIndex);
                    inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                    if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                    inputSurface.swapBuffers();
                }
                generateIndex++;
            }

            // Assume output is available.  Loop until both assumptions are false.
            boolean decoderOutputAvailable = true;
            boolean encoderOutputAvailable = !encoderDone;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the decoder.  It's important to
                // do this before we try to stuff any more data in.
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                    decoderOutputAvailable = false;
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed (but we don't care)");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    MediaFormat decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else {  // decoderStatus >= 0
                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    // The ByteBuffers are null references, but we still get a nonzero size for
                    // the decoded data.
                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.  If we don't
                    // wait, we risk dropping frames.
                    outputSurface.makeCurrent();
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                info.presentationTimeUs);
                        if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        if (!checkSurfaceFrame(checkIndex++)) {
                            badFrames++;
                        }
                    }
                }
                if (decoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }

                // Decoder is drained, check to see if we've got a new buffer of output from
                // the encoder.
                if (!encoderDone) {
                    int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from encoder available");
                        encoderOutputAvailable = false;
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        // not expected for an encoder
                        encoderOutputBuffers = encoder.getOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // not expected for an encoder
                        MediaFormat newFormat = encoder.getOutputFormat();
                        if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                    } else if (encoderStatus < 0) {
                        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    } else { // encoderStatus >= 0
                        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null) {
                            fail("encoderOutputBuffer " + encoderStatus + " was null");
                        }

                        // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        if (outputStream != null) {
                            byte[] data = new byte[info.size];
                            encodedData.get(data);
                            encodedData.position(info.offset);
                            try {
                                outputStream.write(data);
                            } catch (IOException ioe) {
                                Log.w(TAG, "failed writing debug data to file");
                                throw new RuntimeException(ioe);
                            }
                        }

                        // Get a decoder input buffer, blocking until it's available.  We just
                        // drained the decoder output, so we expect there to be a free input
                        // buffer now or in the near future (i.e. this should never deadlock
                        // if the codec is meeting requirements).
                        //
                        // The first buffer of data we get will have the BUFFER_FLAG_CODEC_CONFIG
                        // flag set; the decoder will see this and finish configuring itself.
                        int inputBufIndex = decoder.dequeueInputBuffer(-1);
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputBuf.put(encodedData);
                        decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                info.presentationTimeUs, info.flags);

                        // If everything from the encoder has been passed to the decoder, we
                        // can stop polling the encoder output.  (This is just an optimization.)
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            encoderDone = true;
                            encoderOutputAvailable = false;
                        }
                        if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                + (encoderDone ? " (EOS)" : ""));

                        encoder.releaseOutputBuffer(encoderStatus, false);
                    }
                }
            }
        }

        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (checkIndex != NUM_FRAMES) {
            fail("expected " + NUM_FRAMES + " frames, only decoded " + checkIndex);
        }
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    }


    /**
     * Generates data for frame N into the supplied buffer.  We have an 8-frame animation
     * sequence that wraps around.  It looks like this:
     * <pre>
     *   0 1 2 3
     *   7 6 5 4
     * </pre>
     * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
     */
    private void generateFrame(int frameIndex, int colorFormat, byte[] frameData) {
        final int HALF_WIDTH = mWidth / 2;
        boolean semiPlanar = isSemiPlanarYUV(colorFormat);

        // Set to zero.  In YUV this is a dull green.
        Arrays.fill(frameData, (byte) 0);

        int startX, startY;

        frameIndex %= 8;
        //frameIndex = (frameIndex / 8) % 8;    // use this instead for debug -- easier to see
        if (frameIndex < 4) {
            startX = frameIndex * (mWidth / 4);
            startY = 0;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = mHeight / 2;
        }

        for (int y = startY + (mHeight/2) - 1; y >= startY; --y) {
            for (int x = startX + (mWidth/4) - 1; x >= startX; --x) {
                if (semiPlanar) {
                    // full-size Y, followed by UV pairs at half resolution
                    // e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar
                    // e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E
                    //        OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                    frameData[y * mWidth + x] = (byte) TEST_Y;
                    if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                        frameData[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U;
                        frameData[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V;
                    }
                } else {
                    // full-size Y, followed by quarter-size U and quarter-size V
                    // e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar
                    // e.g. Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar
                    frameData[y * mWidth + x] = (byte) TEST_Y;
                    if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                        frameData[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U;
                        frameData[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) +
                                  (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V;
                    }
                }
            }
        }
    }

    /**
     * Performs a simple check to see if the frame is more or less right.
     * <p>
     * See {@link #generateFrame} for a description of the layout.  The idea is to sample
     * one pixel from the middle of the 8 regions, and verify that the correct one has
     * the non-background color.  We can't know exactly what the video encoder has done
     * with our frames, so we just check to see if it looks like more or less the right thing.
     *
     * @return true if the frame looks good
     */
    private boolean checkFrame(int frameIndex, MediaFormat format, ByteBuffer frameData) {
        // Check for color formats we don't understand.  There is no requirement for video
        // decoders to use a "mundane" format, so we just give a pass on proprietary formats.
        // e.g. Nexus 4 0x7FA30C03 OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
        int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
        if (!isRecognizedFormat(colorFormat)) {
            Log.d(TAG, "unable to check frame contents for colorFormat=" +
                    Integer.toHexString(colorFormat));
            return true;
        }

        boolean frameFailed = false;
        boolean semiPlanar = isSemiPlanarYUV(colorFormat);
        int width = format.getInteger(MediaFormat.KEY_WIDTH);
        int height = format.getInteger(MediaFormat.KEY_HEIGHT);
        int halfWidth = width / 2;
        int cropLeft = format.getInteger("crop-left");
        int cropRight = format.getInteger("crop-right");
        int cropTop = format.getInteger("crop-top");
        int cropBottom = format.getInteger("crop-bottom");
        int cropWidth = cropRight - cropLeft + 1;
        int cropHeight = cropBottom - cropTop + 1;

        assertEquals(mWidth, cropWidth);
        assertEquals(mHeight, cropHeight);

        for (int i = 0; i < 8; i++) {
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            }

            y += cropTop;
            x += cropLeft;

            int testY, testU, testV;
            if (semiPlanar) {
                // Galaxy Nexus uses OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                testY = frameData.get(y * width + x) & 0xff;
                testU = frameData.get(width*height + 2*(y/2) * halfWidth + 2*(x/2)) & 0xff;
                testV = frameData.get(width*height + 2*(y/2) * halfWidth + 2*(x/2) + 1) & 0xff;
            } else {
                // Nexus 10, Nexus 7 use COLOR_FormatYUV420Planar
                testY = frameData.get(y * width + x) & 0xff;
                testU = frameData.get(width*height + (y/2) * halfWidth + (x/2)) & 0xff;
                testV = frameData.get(width*height + halfWidth * (height / 2) +
                        (y/2) * halfWidth + (x/2)) & 0xff;
            }

            int expY, expU, expV;
            if (i == frameIndex % 8) {
                // colored rect
                expY = TEST_Y;
                expU = TEST_U;
                expV = TEST_V;
            } else {
                // should be our zeroed-out buffer
                expY = expU = expV = 0;
            }
            if (!isColorClose(testY, expY) ||
                    !isColorClose(testU, expU) ||
                    !isColorClose(testV, expV)) {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": yuv=" + testY +
                        "," + testU + "," + testV + " vs. expected " + expY + "," + expU +
                        "," + expV + ")");
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Generates a frame of data using GL commands.
     */
    private void generateSurfaceFrame(int frameIndex) {
        frameIndex %= 8;

        int startX, startY;
        if (frameIndex < 4) {
            // (0,0) is bottom-left in GL
            startX = frameIndex * (mWidth / 4);
            startY = mHeight / 2;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = 0;
        }

        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
        GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(startX, startY, mWidth / 4, mHeight / 2);
        GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }

    /**
     * Checks the frame for correctness.  Similar to {@link #checkFrame}, but uses GL to
     * read pixels from the current surface.
     *
     * @return true if the frame looks good
     */
    private boolean checkSurfaceFrame(int frameIndex) {
        ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
        boolean frameFailed = false;

        for (int i = 0; i < 8; i++) {
            // Note the coordinates are inverted on the Y-axis in GL.
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            }

            GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf);
            int r = pixelBuf.get(0) & 0xff;
            int g = pixelBuf.get(1) & 0xff;
            int b = pixelBuf.get(2) & 0xff;
            //Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);

            int expR, expG, expB;
            if (i == frameIndex % 8) {
                // colored rect
                expR = TEST_R1;
                expG = TEST_G1;
                expB = TEST_B1;
            } else {
                // zero background color
                expR = TEST_R0;
                expG = TEST_G0;
                expB = TEST_B0;
            }
            if (!isColorClose(r, expR) ||
                    !isColorClose(g, expG) ||
                    !isColorClose(b, expB)) {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": rgb=" + r +
                        "," + g + "," + b + " vs. expected " + expR + "," + expG +
                        "," + expB + ")");
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Returns true if the actual color value is close to the expected color value.  Updates
     * mLargestColorDelta.
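     * <p>
     * A small delta is tolerated because encoding is lossy and the YUV/RGB conversions
     * involve rounding.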
     */
    boolean isColorClose(int actual, int expected) {
        final int MAX_DELTA = 8;
        int delta = Math.abs(actual - expected);
        if (delta > mLargestColorDelta) {
            mLargestColorDelta = delta;
        }
        return (delta <= MAX_DELTA);
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
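     * For example, with FRAME_RATE = 15, frame 0 maps to 132us and frame 15 to
     * 1000132us, exactly one second later.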
     */
    private static long computePresentationTime(int frameIndex) {
        return 132 + frameIndex * 1000000 / FRAME_RATE;
    }
}