/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.test.AndroidTestCase;
import android.util.Log;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;


/**
 * This test has three steps:
 * <ol>
 *   <li>Generate a video test stream.
 *   <li>Decode the video from the stream, rendering frames into a SurfaceTexture.
 *       Render the texture onto a Surface that feeds a video encoder, modifying
 *       the output with a fragment shader.
 *   <li>Decode the second video and compare it to the expected result.
 * </ol><p>
 * The second step is a typical scenario for video editing.  We could do all this in one
 * step, feeding data through multiple stages of MediaCodec, but at some point we're
 * no longer exercising the code in the way we expect it to be used (and the code
 * gets a bit unwieldy).
 */
public class DecodeEditEncodeTest extends AndroidTestCase {
    private static final String TAG = "DecodeEditEncode";
    private static final boolean WORK_AROUND_BUGS = false;  // avoid fatal codec bugs
    private static final boolean VERBOSE = false;           // lots of logging
    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie

    // parameters for the encoder
    // H.264 Advanced Video Coding
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;
    private static final int FRAME_RATE = 15;               // 15fps
    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames

    // movie length, in frames
    private static final int NUM_FRAMES = 30;               // two seconds of video

    private static final int TEST_R0 = 0;                   // dull green background
    private static final int TEST_G0 = 136;
    private static final int TEST_B0 = 0;
    private static final int TEST_R1 = 236;                 // pink; BT.601 YUV {120,160,200}
    private static final int TEST_G1 = 50;
    private static final int TEST_B1 = 186;

    // Replaces TextureRender.FRAGMENT_SHADER during the edit; the ".rbga" swizzle
    // swaps the green and blue channels.
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord).rbga;\n" +
            "}\n";

    // size of a frame, in pixels
    private int mWidth = -1;
    private int mHeight = -1;
    // bit rate, in bits per second
    private int mBitRate = -1;

    // largest color component delta seen (i.e. actual vs. expected)
    private int mLargestColorDelta;


    public void testVideoEditQCIF() throws Throwable {
        setParameters(176, 144, 1100000);
        VideoEditWrapper.runTest(this);
    }
    public void testVideoEditQVGA() throws Throwable {
        setParameters(320, 240, 2000000);
        VideoEditWrapper.runTest(this);
    }
    public void testVideoEdit720p() throws Throwable {
        setParameters(1280, 720, 6000000);
        VideoEditWrapper.runTest(this);
    }

    /**
     * Wraps videoEditTest(), running it in a new thread.  Required because
     * SurfaceTexture.OnFrameAvailableListener delivers its callback through the Looper
     * of the thread that created the SurfaceTexture; if this test blocked a Looper
     * thread while waiting for a frame, the callback would never be delivered.
     */
    private static class VideoEditWrapper implements Runnable {
        private Throwable mThrowable;
        private DecodeEditEncodeTest mTest;

        private VideoEditWrapper(DecodeEditEncodeTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.videoEditTest();
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /** Entry point. */
        public static void runTest(DecodeEditEncodeTest obj) throws Throwable {
            VideoEditWrapper wrapper = new VideoEditWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Sets the desired frame size and bit rate.
     */
    private void setParameters(int width, int height, int bitRate) {
        if ((width % 16) != 0 || (height % 16) != 0) {
            Log.w(TAG, "WARNING: width or height not multiple of 16");
        }
        mWidth = width;
        mHeight = height;
        mBitRate = bitRate;
    }

    /**
     * Tests editing of a video file with GL.
     */
    private void videoEditTest()
            throws IOException {
        VideoChunks sourceChunks = new VideoChunks();

        if (!generateVideoFile(sourceChunks)) {
            // No AVC codec?  Fail silently.
            return;
        }

        if (DEBUG_SAVE_FILE) {
            // Save a copy to a file.  We call it ".mp4", but it's actually just an elementary
            // stream, so not all video players will know what to do with it.
            String dirName = getContext().getFilesDir().getAbsolutePath();
            String fileName = "vedit1_" + mWidth + "x" + mHeight + ".mp4";
            sourceChunks.saveToFile(new File(dirName, fileName));
        }

        VideoChunks destChunks = editVideoFile(sourceChunks);

        if (DEBUG_SAVE_FILE) {
            String dirName = getContext().getFilesDir().getAbsolutePath();
            String fileName = "vedit2_" + mWidth + "x" + mHeight + ".mp4";
            destChunks.saveToFile(new File(dirName, fileName));
        }

        checkVideoFile(destChunks);
    }

    /**
     * Generates a test video file, saving it as VideoChunks.  We generate frames with GL to
     * avoid having to deal with multiple YUV formats.
     *
     * @return true on success, false on "soft" failure
     */
    private boolean generateVideoFile(VideoChunks output)
            throws IOException {
        if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
        MediaCodec encoder = null;
        InputSurface inputSurface = null;

        try {
            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

            String codecName = selectCodec(format);
            if (codecName == null) {
                // Don't fail CTS if they don't have an AVC codec (not here, anyway).
                Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
                return false;
            }
            if (VERBOSE) Log.d(TAG, "found codec: " + codecName);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            if (VERBOSE) Log.d(TAG, "format: " + format);
            output.setMediaFormat(format);

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.
            encoder = MediaCodec.createByCodecName(codecName);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
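            // createInputSurface() is only valid between configure() and start().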
            inputSurface = new InputSurface(encoder.createInputSurface());
            inputSurface.makeCurrent();
            encoder.start();

            generateVideoData(encoder, inputSurface, output);
        } finally {
            if (encoder != null) {
                if (VERBOSE) Log.d(TAG, "releasing encoder");
                encoder.stop();
                encoder.release();
                if (VERBOSE) Log.d(TAG, "released encoder");
            }
            if (inputSurface != null) {
                inputSurface.release();
            }
        }

        return true;
    }

    /**
     * Returns the name of a codec capable of encoding the specified format, or null if
     * no match was found.
     */
    private static String selectCodec(MediaFormat format) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        return mcl.findEncoderForFormat(format);
    }

    /**
     * Generates video frames, feeds them into the encoder, and writes the output to the
     * VideoChunks instance.
     */
    private void generateVideoData(MediaCodec encoder, InputSurface inputSurface,
            VideoChunks output) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
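        // getOutputBuffers() is the legacy (pre-API-21) buffer-array interface; newer
        // code would use getOutputBuffer(int) instead.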
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int generateIndex = 0;
        int outputCount = 0;

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "gen loop");

            // If we're not done submitting frames, generate a new one and submit it.  The
            // eglSwapBuffers call will block if the input is full.
            if (!inputDone) {
                if (generateIndex == NUM_FRAMES) {
                    // Send an empty frame with the end-of-stream flag set.
                    if (VERBOSE) Log.d(TAG, "signaling input EOS");
                    if (WORK_AROUND_BUGS) {
                        // Might drop a frame, but at least we won't crash mediaserver.
                        try { Thread.sleep(500); } catch (InterruptedException ie) {}
                        outputDone = true;
                    } else {
                        encoder.signalEndOfInputStream();
                    }
                    inputDone = true;
                } else {
                    generateSurfaceFrame(generateIndex);
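                    // computePresentationTime() returns microseconds; the EGL
                    // presentation time stamp is in nanoseconds.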
                    inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                    if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                    inputSurface.swapBuffers();
                }
                generateIndex++;
            }

            // Check for output from the encoder.  If there's no output yet, we either need to
            // provide more input, or we need to wait for the encoder to work its magic.  We
            // can't actually tell which is the case, so if we can't get an output buffer right
            // away we loop around and see if it wants more input.
            //
            // If we do find output, drain it all before supplying more input.
            while (true) {
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                    break;      // out of while
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected on API 18+
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // Codec config flag must be set iff this is the first chunk of output.  This
                    // may not hold for all codecs, but it appears to be the case for video/avc.
                    assertTrue((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 ||
                            outputCount != 0);

                    if (info.size != 0) {
                        // Adjust the ByteBuffer values to match BufferInfo.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        output.addChunk(encodedData, info.flags, info.presentationTimeUs);
                        outputCount++;
                    }

                    encoder.releaseOutputBuffer(encoderStatus, false);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        outputDone = true;
                        break;      // out of while
                    }
                }
            }
        }

        // One chunk per frame, plus one for the config data.
        assertEquals("Frame count", NUM_FRAMES + 1, outputCount);
    }

    /**
     * Generates a frame of data using GL commands.
     * <p>
     * We have an 8-frame animation sequence that wraps around.  It looks like this:
     * <pre>
     *   0 1 2 3
     *   7 6 5 4
     * </pre>
     * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
     */
    private void generateSurfaceFrame(int frameIndex) {
        frameIndex %= 8;

        int startX, startY;
        if (frameIndex < 4) {
            // (0,0) is bottom-left in GL
            startX = frameIndex * (mWidth / 4);
            startY = mHeight / 2;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = 0;
        }

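        // Clear the whole frame to the background color, then use a scissor rect to
        // repaint just the moving rectangle in the second color.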
        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
        GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(startX, startY, mWidth / 4, mHeight / 2);
        GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }

    /**
     * Edits a video file, saving the contents to a new file.  This involves decoding and
     * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
     * <p>
     * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
     * output, but it's not practical to support all OEM formats.  By using a SurfaceTexture
     * for output and a Surface for input, we can avoid issues with obscure formats and can
     * use a fragment shader to do transformations.
     */
    private VideoChunks editVideoFile(VideoChunks inputData)
            throws IOException {
        if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
        VideoChunks outputData = new VideoChunks();
        MediaCodec decoder = null;
        MediaCodec encoder = null;
        InputSurface inputSurface = null;
        OutputSurface outputSurface = null;

        try {
            MediaFormat inputFormat = inputData.getMediaFormat();

            // Create an encoder format that matches the input format.  (Might be able to just
            // re-use the format used to generate the video, since we want it to be the same.)
            MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
            outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
                    inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
            outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
                    inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
            outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
                    inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));

            outputData.setMediaFormat(outputFormat);

            encoder = MediaCodec.createEncoderByType(MIME_TYPE);
            encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            inputSurface = new InputSurface(encoder.createInputSurface());
            inputSurface.makeCurrent();
            encoder.start();

            // OutputSurface uses the EGL context created by InputSurface.
            decoder = MediaCodec.createDecoderByType(MIME_TYPE);
            outputSurface = new OutputSurface();
            outputSurface.changeFragmentShader(FRAGMENT_SHADER);
            decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
            decoder.start();

            editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
        } finally {
            if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
            if (outputSurface != null) {
                outputSurface.release();
            }
            if (inputSurface != null) {
                inputSurface.release();
            }
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }
        }

        return outputData;
    }

    /**
     * Edits a stream of video data.
     */
    private void editVideoData(VideoChunks inputData, MediaCodec decoder,
            OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder,
            VideoChunks outputData) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputChunk = 0;
        int outputCount = 0;

        boolean outputDone = false;
        boolean inputDone = false;
        boolean decoderDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "edit loop");

            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    if (inputChunk == inputData.getNumChunks()) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                    } else {
                        // Copy a chunk of input to the decoder.  The first chunk should have
                        // the BUFFER_FLAG_CODEC_CONFIG flag set.
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputData.getChunkData(inputChunk, inputBuf);
                        int flags = inputData.getChunkFlags(inputChunk);
                        long time = inputData.getChunkTime(inputChunk);
                        decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
                                time, flags);
                        if (VERBOSE) {
                            Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                    inputBuf.position() + " flags=" + flags);
                        }
                        inputChunk++;
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            // Assume output is available from both the decoder and the encoder; loop
            // until both are drained.
            boolean decoderOutputAvailable = !decoderDone;
            boolean encoderOutputAvailable = true;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the encoder.  It's important to
                // do this before we try to stuff any more data in.
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                    encoderOutputAvailable = false;
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // Write the data to the output "file".
                    if (info.size != 0) {
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
                        outputCount++;

                        if (VERBOSE) Log.d(TAG, "encoder output " + info.size + " bytes");
                    }
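                    // The EOS flag shows up here after the decoder's EOS has been forwarded
                    // to the encoder via signalEndOfInputStream().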
                    outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                    encoder.releaseOutputBuffer(encoderStatus, false);
                }
                if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }

                // Encoder is drained, check to see if we've got a new frame of output from
                // the decoder.  (The output is going to a Surface, rather than a ByteBuffer,
                // but we still get information through BufferInfo.)
                if (!decoderDone) {
                    int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from decoder available");
                        decoderOutputAvailable = false;
                    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        //decoderOutputBuffers = decoder.getOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "decoder output buffers changed (we don't care)");
                    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // expected before first buffer of data
                        MediaFormat newFormat = decoder.getOutputFormat();
                        if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                    } else if (decoderStatus < 0) {
                        fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                    } else { // decoderStatus >= 0
                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer "
                                + decoderStatus + " (size=" + info.size + ")");
                        // When decoding to a Surface the output ByteBuffers are null, but
                        // BufferInfo still reports a nonzero size for decoded frames.
                        boolean doRender = (info.size != 0);

                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture.  The API doesn't
                        // guarantee that the texture will be available before the call
                        // returns, so we need to wait for the onFrameAvailable callback to
                        // fire.  If we don't wait, we risk rendering from the previous frame.
                        decoder.releaseOutputBuffer(decoderStatus, doRender);
                        if (doRender) {
                            // This waits for the image and renders it after it arrives.
                            if (VERBOSE) Log.d(TAG, "awaiting frame");
                            outputSurface.awaitNewImage();
                            outputSurface.drawImage();

                            // Send it to the encoder.
                            inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
                            if (VERBOSE) Log.d(TAG, "swapBuffers");
                            inputSurface.swapBuffers();
                        }
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            // forward decoder EOS to encoder
                            if (VERBOSE) Log.d(TAG, "signaling input EOS");
                            if (WORK_AROUND_BUGS) {
                                // Bail early, possibly dropping a frame.
                                return;
                            } else {
                                encoder.signalEndOfInputStream();
                            }
                        }
                    }
                }
            }
        }

        if (inputChunk != outputCount) {
            throw new RuntimeException("frame lost: " + inputChunk + " in, " +
                    outputCount + " out");
        }
    }

    /**
     * Checks the video file to see if the contents match our expectations.  We decode the
     * video to a Surface and check the pixels with GL.
     */
    private void checkVideoFile(VideoChunks inputData)
            throws IOException {
        OutputSurface surface = null;
        MediaCodec decoder = null;

        mLargestColorDelta = -1;

        if (VERBOSE) Log.d(TAG, "checkVideoFile");

        try {
            surface = new OutputSurface(mWidth, mHeight);

            MediaFormat format = inputData.getMediaFormat();
            decoder = MediaCodec.createDecoderByType(MIME_TYPE);
            decoder.configure(format, surface.getSurface(), null, 0);
            decoder.start();

            int badFrames = checkVideoData(inputData, decoder, surface);
            if (badFrames != 0) {
                fail("Found " + badFrames + " bad frames");
            }
        } finally {
            if (surface != null) {
                surface.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Checks the video data.
     *
     * @return the number of bad frames
     */
    private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface) {
        final int TIMEOUT_USEC = 1000;
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        // No output buffer array needed: decoded frames go to the Surface.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputChunk = 0;
        int checkIndex = 0;
        int badFrames = 0;

        boolean outputDone = false;
        boolean inputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "check loop");

            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    if (inputChunk == inputData.getNumChunks()) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS");
                    } else {
                        // Copy a chunk of input to the decoder.  The first chunk should have
                        // the BUFFER_FLAG_CODEC_CONFIG flag set.
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputData.getChunkData(inputChunk, inputBuf);
                        int flags = inputData.getChunkFlags(inputChunk);
                        long time = inputData.getChunkTime(inputChunk);
                        decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
                                time, flags);
                        if (VERBOSE) {
                            Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                    inputBuf.position() + " flags=" + flags);
                        }
                        inputChunk++;
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            if (!outputDone) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    //decoderOutputBuffers = decoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed (we don't care)");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    // We don't examine the ByteBuffer here; the frame goes to the Surface.
                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
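                        // Timestamps should survive the decode-edit-encode round trip intact.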
                        assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                info.presentationTimeUs);
                        surface.awaitNewImage();
                        surface.drawImage();
                        if (!checkSurfaceFrame(checkIndex++)) {
                            badFrames++;
                        }
                    }
                }
            }
        }

        return badFrames;
    }

    /**
     * Checks the frame for correctness, using GL to check RGB values.
     *
     * @return true if the frame looks good
     */
    private boolean checkSurfaceFrame(int frameIndex) {
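        // glReadPixels() needs a direct ByteBuffer; four bytes holds one RGBA pixel.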
        ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
        boolean frameFailed = false;

        for (int i = 0; i < 8; i++) {
            // Note the coordinates are inverted on the Y-axis in GL.
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            }

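            // Sample the center of each rectangle; pixels near the edges suffer most
            // from encoding loss.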
            GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);
            int r = pixelBuf.get(0) & 0xff;
            int g = pixelBuf.get(1) & 0xff;
            int b = pixelBuf.get(2) & 0xff;
            //Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);

            int expR, expG, expB;
            if (i == frameIndex % 8) {
                // colored rect (green/blue swapped)
                expR = TEST_R1;
                expG = TEST_B1;
                expB = TEST_G1;
            } else {
                // zero background color (green/blue swapped)
                expR = TEST_R0;
                expG = TEST_B0;
                expB = TEST_G0;
            }
            if (!isColorClose(r, expR) ||
                    !isColorClose(g, expG) ||
                    !isColorClose(b, expB)) {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": rgb=" + r +
                        "," + g + "," + b + " vs. expected " + expR + "," + expG +
                        "," + expB + ")");
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Returns true if the actual color value is close to the expected color value.  Updates
     * mLargestColorDelta.
     */
    boolean isColorClose(int actual, int expected) {
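        // Allow some slop: RGB<->YUV conversion and lossy encoding shift values slightly.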
        final int MAX_DELTA = 8;
        int delta = Math.abs(actual - expected);
        if (delta > mLargestColorDelta) {
            mLargestColorDelta = delta;
        }
        return (delta <= MAX_DELTA);
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private static long computePresentationTime(int frameIndex) {
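        // The 123us offset makes the first timestamp nonzero, which helps catch code
        // that assumes timestamps start at zero.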
        return 123 + frameIndex * 1000000 / FRAME_RATE;
    }


    /**
     * The elementary stream coming out of the encoder needs to be fed back into
     * the decoder one chunk at a time.  If we just wrote the data to a file, we would lose
     * the information about chunk boundaries.  This class stores the encoded data in memory,
     * retaining the chunk organization.
     */
    private static class VideoChunks {
        private MediaFormat mMediaFormat;
        private ArrayList<byte[]> mChunks = new ArrayList<byte[]>();
        private ArrayList<Integer> mFlags = new ArrayList<Integer>();
        private ArrayList<Long> mTimes = new ArrayList<Long>();

        /**
         * Sets the MediaFormat, for the benefit of a future decoder.
         */
        public void setMediaFormat(MediaFormat format) {
            mMediaFormat = format;
        }

        /**
         * Gets the MediaFormat that was used by the encoder.
         */
        public MediaFormat getMediaFormat() {
            return mMediaFormat;
        }

        /**
         * Adds a new chunk.  Advances buf.position to buf.limit.  The data is copied,
         * so the caller may reuse the buffer.
         */
        public void addChunk(ByteBuffer buf, int flags, long time) {
            byte[] data = new byte[buf.remaining()];
            buf.get(data);
            mChunks.add(data);
            mFlags.add(flags);
            mTimes.add(time);
        }

        /**
         * Returns the number of chunks currently held.
         */
        public int getNumChunks() {
            return mChunks.size();
        }

        /**
         * Copies the data from chunk N into "dest".  Advances dest.position.
         */
        public void getChunkData(int chunk, ByteBuffer dest) {
            byte[] data = mChunks.get(chunk);
            dest.put(data);
        }

        /**
         * Returns the flags associated with chunk N.
         */
        public int getChunkFlags(int chunk) {
            return mFlags.get(chunk);
        }

        /**
         * Returns the timestamp associated with chunk N.
         */
        public long getChunkTime(int chunk) {
            return mTimes.get(chunk);
        }

        /**
         * Writes the chunks to a file as a contiguous stream.  Useful for debugging.
         */
        public void saveToFile(File file) {
            Log.d(TAG, "saving chunk data to file " + file);
            FileOutputStream fos = null;
            BufferedOutputStream bos = null;

            try {
                fos = new FileOutputStream(file);
                bos = new BufferedOutputStream(fos);
                fos = null;     // closing bos will also close fos

                int numChunks = getNumChunks();
                for (int i = 0; i < numChunks; i++) {
                    byte[] chunk = mChunks.get(i);
                    bos.write(chunk);
                }
            } catch (IOException ioe) {
                throw new RuntimeException(ioe);
            } finally {
                try {
                    if (bos != null) {
                        bos.close();
                    }
                    if (fos != null) {
                        fos.close();
                    }
                } catch (IOException ioe) {
                    throw new RuntimeException(ioe);
                }
            }
        }
    }
}