      1 /*
      2  * Copyright (C) 2013 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package com.android.cts.videoperf;
     18 
     19 import android.cts.util.MediaUtils;
     20 import android.cts.util.DeviceReportLog;
     21 import android.graphics.ImageFormat;
     22 import android.graphics.Point;
     23 import android.media.cts.CodecImage;
     24 import android.media.cts.CodecUtils;
     25 import android.media.Image;
     26 import android.media.Image.Plane;
     27 import android.media.MediaCodec;
     28 import android.media.MediaCodec.BufferInfo;
     29 import android.media.MediaCodecInfo;
     30 import android.media.MediaCodecInfo.CodecCapabilities;
     31 import android.media.MediaCodecList;
     32 import android.media.MediaFormat;
     33 import android.util.Log;
     34 import android.util.Pair;
     35 import android.util.Range;
     36 import android.util.Size;
     37 
     38 import android.cts.util.CtsAndroidTestCase;
     39 import com.android.cts.util.ResultType;
     40 import com.android.cts.util.ResultUnit;
     41 import com.android.cts.util.Stat;
     42 import com.android.cts.util.TimeoutReq;
     43 
     44 import java.io.IOException;
     45 import java.nio.ByteBuffer;
     46 import java.lang.System;
     47 import java.util.Arrays;
     48 import java.util.ArrayList;
     49 import java.util.LinkedList;
     50 import java.util.Random;
     51 import java.util.Vector;
     52 
     53 /**
      54  * Tests video encoder / decoder performance by running encoding and decoding without
      55  * displaying the raw data. To keep things simple, the encoder encodes synthetic data and the
      56  * decoder decodes the resulting stream, so this approach does not work on devices that only
      57  * have a decoder. The performance index is the total time taken to encode and decode all
      58  * frames.
      59  * To prevent quality from being sacrificed for faster encoding / decoding, randomly selected
      60  * pixels of the decoded output are compared with the original image. As the pixel comparison
      61  * can slow down decoding, only a limited number of pixels are checked. As there can be only
      62  * one performance index, an RMS pixel error above a certain threshold is treated as an error.
     63  */
     64 public class VideoEncoderDecoderTest extends CtsAndroidTestCase {
     65     private static final String TAG = "VideoEncoderDecoderTest";
      66     // this wait time affects fps; too large a value acts as a blocker if the device fps
      67     // is not very high.
     68     private static final long VIDEO_CODEC_WAIT_TIME_US = 5000;
     69     private static final boolean VERBOSE = false;
     70     private static final String VIDEO_AVC = MediaFormat.MIMETYPE_VIDEO_AVC;
     71     private static final String VIDEO_VP8 = MediaFormat.MIMETYPE_VIDEO_VP8;
     72     private static final String VIDEO_H263 = MediaFormat.MIMETYPE_VIDEO_H263;
     73     private static final String VIDEO_MPEG4 = MediaFormat.MIMETYPE_VIDEO_MPEG4;
     74     private int mCurrentTestRound = 0;
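             // per-round arrays of nanosecond gaps between consecutive output frames, used for fps stats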
     75     private double[][] mEncoderFrameTimeDiff = null;
     76     private double[][] mDecoderFrameTimeDiff = null;
      77     // I-frame interval for the encoder, in seconds
     78     private static final int KEY_I_FRAME_INTERVAL = 5;
     79     private static final int MOVING_AVERAGE_NUM = 10;
     80 
     81     private static final int Y_CLAMP_MIN = 16;
     82     private static final int Y_CLAMP_MAX = 235;
     83     private static final int YUV_PLANE_ADDITIONAL_LENGTH = 200;
     84     private ByteBuffer mYBuffer, mYDirectBuffer;
     85     private ByteBuffer mUVBuffer, mUVDirectBuffer;
     86     private int mSrcColorFormat;
     87     private int mDstColorFormat;
     88     private int mBufferWidth;
     89     private int mBufferHeight;
     90     private int mVideoWidth;
     91     private int mVideoHeight;
     92     private int mFrameRate;
     93 
     94     private MediaFormat mEncInputFormat;
     95     private MediaFormat mEncOutputFormat;
     96     private MediaFormat mDecOutputFormat;
     97 
     98     private LinkedList<Pair<ByteBuffer, BufferInfo>> mEncodedOutputBuffer;
      99     // check this many pixels per decoded frame
     100     // checking too many points decreases the decoder frame rate a lot.
    101     private static final int PIXEL_CHECK_PER_FRAME = 1000;
     102     // An RMS error in pixel values above this margin will be treated as an error.
     103     private static final double PIXEL_RMS_ERROR_MARGIN = 20.0;
     104     private double mRmsErrorMargin = PIXEL_RMS_ERROR_MARGIN;
    105     private Random mRandom;
    106 
    107     private class TestConfig {
    108         public boolean mTestPixels = true;
    109         public boolean mTestResult = false;
    110         public boolean mReportFrameTime = false;
    111         public int mTotalFrames = 300;
    112         public int mMaxTimeMs = 120000;  // 2 minutes
    113         public int mNumberOfRepeat = 10;
    114     }
    115 
    116     private TestConfig mTestConfig;
    117 
    118     private DeviceReportLog mReportLog;
    119 
    120     @Override
    121     protected void setUp() throws Exception {
    122         mEncodedOutputBuffer = new LinkedList<Pair<ByteBuffer, BufferInfo>>();
    123         // Use time as a seed, hoping to prevent checking pixels in the same pattern
    124         long now = System.currentTimeMillis();
    125         mRandom = new Random(now);
    126         mTestConfig = new TestConfig();
    127         mReportLog = new DeviceReportLog();
    128         super.setUp();
    129     }
    130 
    131     @Override
    132     protected void tearDown() throws Exception {
    133         mEncodedOutputBuffer.clear();
    134         mEncodedOutputBuffer = null;
    135         mYBuffer = null;
    136         mUVBuffer = null;
    137         mYDirectBuffer = null;
    138         mUVDirectBuffer = null;
    139         mRandom = null;
    140         mTestConfig = null;
    141         mReportLog.deliverReportToHost(getInstrumentation());
    142         super.tearDown();
    143     }
    144 
    145     private String getEncoderName(String mime) {
    146         return getCodecName(mime, true /* isEncoder */);
    147     }
    148 
    149     private String getDecoderName(String mime) {
    150         return getCodecName(mime, false /* isEncoder */);
    151     }
    152 
    153     private String getCodecName(String mime, boolean isEncoder) {
    154         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    155         for (MediaCodecInfo info : mcl.getCodecInfos()) {
    156             if (info.isEncoder() != isEncoder) {
    157                 continue;
    158             }
    159             CodecCapabilities caps = null;
    160             try {
    161                 caps = info.getCapabilitiesForType(mime);
    162             } catch (IllegalArgumentException e) {  // mime is not supported
    163                 continue;
    164             }
    165             return info.getName();
    166         }
    167         return null;
    168     }
    169 
    170     private String[] getEncoderName(String mime, boolean isGoog) {
    171         return getCodecName(mime, isGoog, true /* isEncoder */);
    172     }
    173 
    174     private String[] getDecoderName(String mime, boolean isGoog) {
    175         return getCodecName(mime, isGoog, false /* isEncoder */);
    176     }
    177 
    178     private String[] getCodecName(String mime, boolean isGoog, boolean isEncoder) {
    179         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    180         ArrayList<String> result = new ArrayList<String>();
    181         for (MediaCodecInfo info : mcl.getCodecInfos()) {
    182             if (info.isEncoder() != isEncoder
    183                     || info.getName().toLowerCase().startsWith("omx.google.") != isGoog) {
    184                 continue;
    185             }
    186             CodecCapabilities caps = null;
    187             try {
    188                 caps = info.getCapabilitiesForType(mime);
    189             } catch (IllegalArgumentException e) {  // mime is not supported
    190                 continue;
    191             }
    192             result.add(info.getName());
    193         }
    194         return result.toArray(new String[result.size()]);
    195     }
    196 
    197     public void testAvc0176x0144() throws Exception {
    198         doTestDefault(VIDEO_AVC, 176, 144);
    199     }
    200 
    201     public void testAvc0352x0288() throws Exception {
    202         doTestDefault(VIDEO_AVC, 352, 288);
    203     }
    204 
    205     public void testAvc0720x0480() throws Exception {
    206         doTestDefault(VIDEO_AVC, 720, 480);
    207     }
    208 
    209     public void testAvc1280x0720() throws Exception {
    210         doTestDefault(VIDEO_AVC, 1280, 720);
    211     }
    212 
    213     /**
     214      * The resolution is intentionally set to 1072 rather than 1080, as 1080 is not a
     215      * multiple of 16 and would require additional settings, such as stride, which are not
     216      * specified in the API documentation.
    217      */
    218     public void testAvc1920x1072() throws Exception {
    219         doTestDefault(VIDEO_AVC, 1920, 1072);
    220     }
    221 
    222     // Avc tests
    223     public void testAvc0320x0240Other() throws Exception {
    224         doTestOther(VIDEO_AVC, 320, 240);
    225     }
    226 
    227     public void testAvc0320x0240Goog() throws Exception {
    228         doTestGoog(VIDEO_AVC, 320, 240);
    229     }
    230 
    231     public void testAvc0720x0480Other() throws Exception {
    232         doTestOther(VIDEO_AVC, 720, 480);
    233     }
    234 
    235     public void testAvc0720x0480Goog() throws Exception {
    236         doTestGoog(VIDEO_AVC, 720, 480);
    237     }
    238 
    239     @TimeoutReq(minutes = 10)
    240     public void testAvc1280x0720Other() throws Exception {
    241         doTestOther(VIDEO_AVC, 1280, 720);
    242     }
    243 
    244     @TimeoutReq(minutes = 10)
    245     public void testAvc1280x0720Goog() throws Exception {
    246         doTestGoog(VIDEO_AVC, 1280, 720);
    247     }
    248 
    249     @TimeoutReq(minutes = 10)
    250     public void testAvc1920x1080Other() throws Exception {
    251         doTestOther(VIDEO_AVC, 1920, 1080);
    252     }
    253 
    254     @TimeoutReq(minutes = 10)
    255     public void testAvc1920x1080Goog() throws Exception {
    256         doTestGoog(VIDEO_AVC, 1920, 1080);
    257     }
    258 
    259     // Vp8 tests
    260     public void testVp80320x0180Other() throws Exception {
    261         doTestOther(VIDEO_VP8, 320, 180);
    262     }
    263 
    264     public void testVp80320x0180Goog() throws Exception {
    265         doTestGoog(VIDEO_VP8, 320, 180);
    266     }
    267 
    268     public void testVp80640x0360Other() throws Exception {
    269         doTestOther(VIDEO_VP8, 640, 360);
    270     }
    271 
    272     public void testVp80640x0360Goog() throws Exception {
    273         doTestGoog(VIDEO_VP8, 640, 360);
    274     }
    275 
    276     @TimeoutReq(minutes = 10)
    277     public void testVp81280x0720Other() throws Exception {
    278         doTestOther(VIDEO_VP8, 1280, 720);
    279     }
    280 
    281     @TimeoutReq(minutes = 10)
    282     public void testVp81280x0720Goog() throws Exception {
    283         doTestGoog(VIDEO_VP8, 1280, 720);
    284     }
    285 
    286     @TimeoutReq(minutes = 10)
    287     public void testVp81920x1080Other() throws Exception {
    288         doTestOther(VIDEO_VP8, 1920, 1080);
    289     }
    290 
    291     @TimeoutReq(minutes = 10)
    292     public void testVp81920x1080Goog() throws Exception {
    293         doTestGoog(VIDEO_VP8, 1920, 1080);
    294     }
    295 
    296     // H263 tests
    297     public void testH2630176x0144Other() throws Exception {
    298         doTestOther(VIDEO_H263, 176, 144);
    299     }
    300 
    301     public void testH2630176x0144Goog() throws Exception {
    302         doTestGoog(VIDEO_H263, 176, 144);
    303     }
    304 
    305     public void testH2630352x0288Other() throws Exception {
    306         doTestOther(VIDEO_H263, 352, 288);
    307     }
    308 
    309     public void testH2630352x0288Goog() throws Exception {
    310         doTestGoog(VIDEO_H263, 352, 288);
    311     }
    312 
    313     // Mpeg4 tests
    314     public void testMpeg40176x0144Other() throws Exception {
    315         doTestOther(VIDEO_MPEG4, 176, 144);
    316     }
    317 
    318     public void testMpeg40176x0144Goog() throws Exception {
    319         doTestGoog(VIDEO_MPEG4, 176, 144);
    320     }
    321 
    322     public void testMpeg40352x0288Other() throws Exception {
    323         doTestOther(VIDEO_MPEG4, 352, 288);
    324     }
    325 
    326     public void testMpeg40352x0288Goog() throws Exception {
    327         doTestGoog(VIDEO_MPEG4, 352, 288);
    328     }
    329 
    330     public void testMpeg40640x0480Other() throws Exception {
    331         doTestOther(VIDEO_MPEG4, 640, 480);
    332     }
    333 
    334     public void testMpeg40640x0480Goog() throws Exception {
    335         doTestGoog(VIDEO_MPEG4, 640, 480);
    336     }
    337 
    338     @TimeoutReq(minutes = 10)
    339     public void testMpeg41280x0720Other() throws Exception {
    340         doTestOther(VIDEO_MPEG4, 1280, 720);
    341     }
    342 
    343     @TimeoutReq(minutes = 10)
    344     public void testMpeg41280x0720Goog() throws Exception {
    345         doTestGoog(VIDEO_MPEG4, 1280, 720);
    346     }
    347 
    348     private boolean isSrcSemiPlanar() {
    349         return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
    350     }
    351 
    352     private boolean isSrcFlexYUV() {
    353         return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible;
    354     }
    355 
    356     private boolean isDstSemiPlanar() {
    357         return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
    358     }
    359 
    360     private boolean isDstFlexYUV() {
    361         return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible;
    362     }
    363 
    364     private static int getColorFormat(CodecInfo info) {
    365         if (info.mSupportSemiPlanar) {
    366             return CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
    367         } else if (info.mSupportPlanar) {
    368             return CodecCapabilities.COLOR_FormatYUV420Planar;
    369         } else {
    370             // FlexYUV must be supported
    371             return CodecCapabilities.COLOR_FormatYUV420Flexible;
    372         }
    373     }
    374 
    375     private void doTestGoog(String mimeType, int w, int h) throws Exception {
    376         mTestConfig.mTestPixels = false;
    377         mTestConfig.mTestResult = true;
    378         mTestConfig.mTotalFrames = 3000;
    379         mTestConfig.mNumberOfRepeat = 2;
    380         doTest(true /* isGoog */, mimeType, w, h);
    381     }
    382 
    383     private void doTestOther(String mimeType, int w, int h) throws Exception {
    384         mTestConfig.mTestPixels = false;
    385         mTestConfig.mTestResult = true;
    386         mTestConfig.mTotalFrames = 3000;
    387         mTestConfig.mNumberOfRepeat = 2;
    388         doTest(false /* isGoog */, mimeType, w, h);
    389     }
    390 
    391     private void doTestDefault(String mimeType, int w, int h) throws Exception {
    392         String encoderName = getEncoderName(mimeType);
    393         if (encoderName == null) {
    394             Log.i(TAG, "Encoder for " + mimeType + " not found");
    395             return;
    396         }
    397 
    398         String decoderName = getDecoderName(mimeType);
    399         if (decoderName == null) {
    400             Log.i(TAG, "Encoder for " + mimeType + " not found");
    401             return;
    402         }
    403 
    404         doTestByName(encoderName, decoderName, mimeType, w, h);
    405     }
    406 
    407     /**
     408      * Run the encoding / decoding test for the given codec mimeType.
     409      * @param isGoog whether to test Google (OMX.google.*) or non-Google codecs
     410      * @param mimeType MIME type such as video/avc
     411      * @param w video width
     412      * @param h video height
    413      */
    414     private void doTest(boolean isGoog, String mimeType, int w, int h)
    415             throws Exception {
    416         String[] encoderNames = getEncoderName(mimeType, isGoog);
    417         if (encoderNames.length == 0) {
     418             Log.i(TAG, (isGoog ? "Google " : "Non-google ")
     419                     + "encoder for " + mimeType + " not found");
    420             return;
    421         }
    422 
    423         String[] decoderNames = getDecoderName(mimeType, isGoog);
    424         if (decoderNames.length == 0) {
     425             Log.i(TAG, (isGoog ? "Google " : "Non-google ")
     426                     + "decoder for " + mimeType + " not found");
    427             return;
    428         }
    429 
    430         for (String encoderName: encoderNames) {
    431             for (String decoderName: decoderNames) {
    432                 doTestByName(encoderName, decoderName, mimeType, w, h);
    433             }
    434         }
    435     }
    436 
    437     private void doTestByName(
    438             String encoderName, String decoderName, String mimeType, int w, int h)
    439             throws Exception {
    440         CodecInfo infoEnc = CodecInfo.getSupportedFormatInfo(encoderName, mimeType, w, h);
    441         if (infoEnc == null) {
    442             Log.i(TAG, "Encoder " + mimeType + " with " + w + "," + h + " not supported");
    443             return;
    444         }
    445         CodecInfo infoDec = CodecInfo.getSupportedFormatInfo(decoderName, mimeType, w, h);
    446         assertNotNull(infoDec);
    447         mVideoWidth = w;
    448         mVideoHeight = h;
    449 
    450         mSrcColorFormat = getColorFormat(infoEnc);
    451         mDstColorFormat = getColorFormat(infoDec);
    452         Log.i(TAG, "Testing video resolution " + w + "x" + h +
    453                    ": enc format " + mSrcColorFormat +
    454                    ", dec format " + mDstColorFormat);
    455 
    456         initYUVPlane(w + YUV_PLANE_ADDITIONAL_LENGTH, h + YUV_PLANE_ADDITIONAL_LENGTH);
    457         mEncoderFrameTimeDiff =
    458                 new double[mTestConfig.mNumberOfRepeat][mTestConfig.mTotalFrames - 1];
    459         mDecoderFrameTimeDiff =
    460                 new double[mTestConfig.mNumberOfRepeat][mTestConfig.mTotalFrames - 1];
    461         double[] encoderFpsResults = new double[mTestConfig.mNumberOfRepeat];
    462         double[] decoderFpsResults = new double[mTestConfig.mNumberOfRepeat];
    463         double[] totalFpsResults = new double[mTestConfig.mNumberOfRepeat];
    464         double[] decoderRmsErrorResults = new double[mTestConfig.mNumberOfRepeat];
    465         boolean success = true;
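                 // each repeat runs a full encode pass and then decodes the stored bitstream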
    466         for (int i = 0; i < mTestConfig.mNumberOfRepeat && success; i++) {
    467             mCurrentTestRound = i;
    468             MediaFormat format = new MediaFormat();
    469             format.setString(MediaFormat.KEY_MIME, mimeType);
    470             format.setInteger(MediaFormat.KEY_BIT_RATE, infoEnc.mBitRate);
    471             format.setInteger(MediaFormat.KEY_WIDTH, w);
    472             format.setInteger(MediaFormat.KEY_HEIGHT, h);
    473             format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mSrcColorFormat);
    474             format.setInteger(MediaFormat.KEY_FRAME_RATE, infoEnc.mFps);
    475             mFrameRate = infoEnc.mFps;
    476             format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, KEY_I_FRAME_INTERVAL);
    477 
    478             double encodingTime = runEncoder(encoderName, format, mTestConfig.mTotalFrames);
    479             // re-initialize format for decoder
    480             format = new MediaFormat();
    481             format.setString(MediaFormat.KEY_MIME, mimeType);
    482             format.setInteger(MediaFormat.KEY_WIDTH, w);
    483             format.setInteger(MediaFormat.KEY_HEIGHT, h);
    484             format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDstColorFormat);
    485             double[] decoderResult = runDecoder(decoderName, format);
    486             if (decoderResult == null) {
    487                 success = false;
    488             } else {
    489                 double decodingTime = decoderResult[0];
    490                 decoderRmsErrorResults[i] = decoderResult[1];
    491                 encoderFpsResults[i] = (double)mTestConfig.mTotalFrames / encodingTime * 1000.0;
    492                 decoderFpsResults[i] = (double)mTestConfig.mTotalFrames / decodingTime * 1000.0;
    493                 totalFpsResults[i] =
    494                         (double)mTestConfig.mTotalFrames / (encodingTime + decodingTime) * 1000.0;
    495             }
    496 
    497             // clear things for re-start
    498             mEncodedOutputBuffer.clear();
     499             // run gc so that every iteration starts from a similar memory state.
    500             System.gc();
    501         }
    502         mReportLog.printArray("encoder", encoderFpsResults, ResultType.HIGHER_BETTER,
    503                 ResultUnit.FPS);
    504         mReportLog.printArray("rms error", decoderRmsErrorResults, ResultType.LOWER_BETTER,
    505                 ResultUnit.NONE);
    506         mReportLog.printArray("decoder", decoderFpsResults, ResultType.HIGHER_BETTER,
    507                 ResultUnit.FPS);
    508         mReportLog.printArray("encoder decoder", totalFpsResults, ResultType.HIGHER_BETTER,
    509                 ResultUnit.FPS);
    510         mReportLog.printValue(mimeType + " encoder average fps for " + w + "x" + h,
    511                 Stat.getAverage(encoderFpsResults), ResultType.HIGHER_BETTER, ResultUnit.FPS);
    512         mReportLog.printValue(mimeType + " decoder average fps for " + w + "x" + h,
    513                 Stat.getAverage(decoderFpsResults), ResultType.HIGHER_BETTER, ResultUnit.FPS);
    514         mReportLog.printSummary("encoder decoder", Stat.getAverage(totalFpsResults),
    515                 ResultType.HIGHER_BETTER, ResultUnit.FPS);
    516 
    517         boolean encTestPassed = false;
    518         boolean decTestPassed = false;
    519         double[] measuredFps = new double[mTestConfig.mNumberOfRepeat];
    520         String[] resultRawData = new String[mTestConfig.mNumberOfRepeat];
    521         for (int i = 0; i < mTestConfig.mNumberOfRepeat; i++) {
     522             // make sure that the rms error is not too big.
     523             if (decoderRmsErrorResults[i] >= mRmsErrorMargin) {
     524                 fail("rms error is bigger than the limit: "
     525                         + decoderRmsErrorResults[i] + " vs " + mRmsErrorMargin);
     526             }
    527 
    528             if (mTestConfig.mReportFrameTime) {
    529                 mReportLog.printValue(
    530                         "encodertest#" + i + ": " + Arrays.toString(mEncoderFrameTimeDiff[i]),
    531                         0, ResultType.NEUTRAL, ResultUnit.NONE);
    532                 mReportLog.printValue(
    533                         "decodertest#" + i + ": " + Arrays.toString(mDecoderFrameTimeDiff[i]),
    534                         0, ResultType.NEUTRAL, ResultUnit.NONE);
    535             }
    536 
    537             if (mTestConfig.mTestResult) {
    538                 double[] avgs = MediaUtils.calculateMovingAverage(
    539                         mEncoderFrameTimeDiff[i], MOVING_AVERAGE_NUM);
    540                 double encMin = Stat.getMin(avgs);
    541                 double encMax = Stat.getMax(avgs);
    542                 double encAvg = MediaUtils.getAverage(mEncoderFrameTimeDiff[i]);
    543                 double encStdev = MediaUtils.getStdev(avgs);
    544                 String prefix = "codec=" + encoderName + " round=" + i +
    545                         " EncInputFormat=" + mEncInputFormat +
    546                         " EncOutputFormat=" + mEncOutputFormat;
    547                 String result =
    548                         MediaUtils.logResults(mReportLog, prefix, encMin, encMax, encAvg, encStdev);
    549                 double measuredEncFps = 1000000000 / encMin;
    550                 resultRawData[i] = result;
    551                 measuredFps[i] = measuredEncFps;
    552                 if (!encTestPassed) {
    553                     encTestPassed = MediaUtils.verifyResults(
    554                             encoderName, mimeType, w, h, measuredEncFps);
    555                 }
    556 
    557                 avgs = MediaUtils.calculateMovingAverage(
    558                         mDecoderFrameTimeDiff[i], MOVING_AVERAGE_NUM);
    559                 double decMin = Stat.getMin(avgs);
    560                 double decMax = Stat.getMax(avgs);
    561                 double decAvg = MediaUtils.getAverage(mDecoderFrameTimeDiff[i]);
    562                 double decStdev = MediaUtils.getStdev(avgs);
    563                 prefix = "codec=" + decoderName + " size=" + w + "x" + h + " round=" + i +
    564                         " DecOutputFormat=" + mDecOutputFormat;
    565                 MediaUtils.logResults(mReportLog, prefix, decMin, decMax, decAvg, decStdev);
    566                 double measuredDecFps = 1000000000 / decMin;
    567                 if (!decTestPassed) {
    568                     decTestPassed = MediaUtils.verifyResults(
    569                             decoderName, mimeType, w, h, measuredDecFps);
    570                 }
    571             }
    572         }
    573 
    574         if (mTestConfig.mTestResult) {
    575             if (!encTestPassed) {
    576                 Range<Double> reportedRange =
    577                     MediaUtils.getAchievableFrameRatesFor(encoderName, mimeType, w, h);
    578                 String failMessage =
    579                     MediaUtils.getErrorMessage(reportedRange, measuredFps, resultRawData);
    580                 fail(failMessage);
    581             }
    582             // Decoder result will be verified in VideoDecoderPerfTest
    583             // if (!decTestPassed) {
    584             //     fail("Measured fps for " + decoderName +
    585             //             " doesn't match with reported achievable frame rates.");
    586             // }
    587         }
    588         measuredFps = null;
    589         resultRawData = null;
    590     }
    591 
    592     /**
     593      * Run encoder benchmarking.
    594      * @param encoderName encoder name
    595      * @param format format of media to encode
    596      * @param totalFrames total number of frames to encode
    597      * @return time taken in ms to encode the frames. This does not include initialization time.
    598      */
    599     private double runEncoder(String encoderName, MediaFormat format, int totalFrames) {
    600         MediaCodec codec = null;
    601         try {
    602             codec = MediaCodec.createByCodecName(encoderName);
    603             codec.configure(
    604                     format,
    605                     null /* surface */,
    606                     null /* crypto */,
    607                     MediaCodec.CONFIGURE_FLAG_ENCODE);
    608         } catch (IllegalStateException e) {
    609             Log.e(TAG, "codec '" + encoderName + "' failed configuration.");
    610             codec.release();
     611             fail("codec '" + encoderName + "' failed configuration.");
    612         } catch (IOException | NullPointerException e) {
    613             Log.i(TAG, "could not find codec for " + format);
    614             return Double.NaN;
    615         }
    616         codec.start();
    617         mEncInputFormat = codec.getInputFormat();
    618         ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
    619 
    620         int numBytesSubmitted = 0;
    621         int numBytesDequeued = 0;
    622         int inFramesCount = 0;
    623         long lastOutputTimeNs = 0;
    624         long start = System.currentTimeMillis();
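                 // feed synthetic frames and drain encoder output in the same loop, recording the
                 // time gap between consecutive output buffers for later fps statistics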
    625         while (true) {
    626             int index;
    627 
    628             if (inFramesCount < totalFrames) {
    629                 index = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
    630                 if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
    631                     int size;
    632                     boolean eos = (inFramesCount == (totalFrames - 1));
    633                     if (!eos && ((System.currentTimeMillis() - start) > mTestConfig.mMaxTimeMs)) {
    634                         eos = true;
    635                     }
    636                     // when encoder only supports flexYUV, use Image only; otherwise,
    637                     // use ByteBuffer & Image each on half of the frames to test both
    638                     if (isSrcFlexYUV() || inFramesCount % 2 == 0) {
    639                         Image image = codec.getInputImage(index);
    640                         // image should always be available
    641                         assertTrue(image != null);
    642                         size = queueInputImageEncoder(
    643                                 codec, image, index, inFramesCount,
    644                                 eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
    645                     } else {
    646                         ByteBuffer buffer = codec.getInputBuffer(index);
    647                         size = queueInputBufferEncoder(
    648                                 codec, buffer, index, inFramesCount,
    649                                 eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
    650                     }
    651                     inFramesCount++;
    652                     numBytesSubmitted += size;
    653                     if (VERBOSE) {
    654                         Log.d(TAG, "queued " + size + " bytes of input data, frame " +
    655                                 (inFramesCount - 1));
    656                     }
    657 
    658                 }
    659             }
    660             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    661             index = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
    662             if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
    663             } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    664                 mEncOutputFormat = codec.getOutputFormat();
    665             } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    666                 codecOutputBuffers = codec.getOutputBuffers();
    667             } else if (index >= 0) {
    668                 if (lastOutputTimeNs > 0) {
    669                     int pos = mEncodedOutputBuffer.size() - 1;
    670                     if (pos < mEncoderFrameTimeDiff[mCurrentTestRound].length) {
    671                         long diff = System.nanoTime() - lastOutputTimeNs;
    672                         mEncoderFrameTimeDiff[mCurrentTestRound][pos] = diff;
    673                     }
    674                 }
    675                 lastOutputTimeNs = System.nanoTime();
    676 
    677                 dequeueOutputBufferEncoder(codec, codecOutputBuffers, index, info);
    678                 numBytesDequeued += info.size;
    679                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    680                     if (VERBOSE) {
    681                         Log.d(TAG, "dequeued output EOS.");
    682                     }
    683                     break;
    684                 }
    685                 if (VERBOSE) {
    686                     Log.d(TAG, "dequeued " + info.size + " bytes of output data.");
    687                 }
    688             }
    689         }
    690         long finish = System.currentTimeMillis();
    691         int validDataNum = Math.min(mEncodedOutputBuffer.size() - 1,
    692                 mEncoderFrameTimeDiff[mCurrentTestRound].length);
    693         mEncoderFrameTimeDiff[mCurrentTestRound] =
    694                 Arrays.copyOf(mEncoderFrameTimeDiff[mCurrentTestRound], validDataNum);
    695         if (VERBOSE) {
    696             Log.d(TAG, "queued a total of " + numBytesSubmitted + "bytes, "
    697                     + "dequeued " + numBytesDequeued + " bytes.");
    698         }
    699         codec.stop();
    700         codec.release();
    701         codec = null;
    702         return (double)(finish - start);
    703     }
    704 
    705     /**
    706      * Fills input buffer for encoder from YUV buffers.
    707      * @return size of enqueued data.
    708      */
    709     private int queueInputBufferEncoder(
    710             MediaCodec codec, ByteBuffer buffer, int index, int frameCount, int flags) {
    711         buffer.clear();
    712 
    713         Point origin = getOrigin(frameCount);
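                 // the synthetic YUV plane is larger than the video frame; shifting the origin for
                 // every frame simulates motion in the scene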
    714         // Y color first
    715         int srcOffsetY = origin.x + origin.y * mBufferWidth;
    716         final byte[] yBuffer = mYBuffer.array();
    717         for (int i = 0; i < mVideoHeight; i++) {
    718             buffer.put(yBuffer, srcOffsetY, mVideoWidth);
    719             srcOffsetY += mBufferWidth;
    720         }
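                 // chroma next: semi-planar uses a single interleaved UV buffer, planar stores all U
                 // samples followed by all V samples, both at quarter resolution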
    721         if (isSrcSemiPlanar()) {
    722             int srcOffsetU = origin.y / 2 * mBufferWidth + origin.x / 2 * 2;
    723             final byte[] uvBuffer = mUVBuffer.array();
    724             for (int i = 0; i < mVideoHeight / 2; i++) {
    725                 buffer.put(uvBuffer, srcOffsetU, mVideoWidth);
    726                 srcOffsetU += mBufferWidth;
    727             }
    728         } else {
    729             int srcOffsetU = origin.y / 2 * mBufferWidth / 2 + origin.x / 2;
    730             int srcOffsetV = srcOffsetU + mBufferWidth / 2 * mBufferHeight / 2;
    731             final byte[] uvBuffer = mUVBuffer.array();
    732             for (int i = 0; i < mVideoHeight / 2; i++) { //U only
    733                 buffer.put(uvBuffer, srcOffsetU, mVideoWidth / 2);
    734                 srcOffsetU += mBufferWidth / 2;
    735             }
    736             for (int i = 0; i < mVideoHeight / 2; i++) { //V only
    737                 buffer.put(uvBuffer, srcOffsetV, mVideoWidth / 2);
    738                 srcOffsetV += mBufferWidth / 2;
    739             }
    740         }
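                 // YUV420 carries 1 byte of Y per pixel plus quarter-resolution U and V, i.e. 1.5 bytes per pixel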
    741         int size = mVideoHeight * mVideoWidth * 3 / 2;
    742         long ptsUsec = computePresentationTime(frameCount);
    743 
    744         codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
    745         if (VERBOSE && (frameCount == 0)) {
    746             printByteArray("Y ", mYBuffer.array(), 0, 20);
    747             printByteArray("UV ", mUVBuffer.array(), 0, 20);
    748             printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
    749         }
    750         return size;
    751     }
    752 
    753     class YUVImage extends CodecImage {
    754         private final int mImageWidth;
    755         private final int mImageHeight;
    756         private final Plane[] mPlanes;
    757 
    758         YUVImage(
    759                 Point origin,
    760                 int imageWidth, int imageHeight,
    761                 int arrayWidth, int arrayHeight,
    762                 boolean semiPlanar,
    763                 ByteBuffer bufferY, ByteBuffer bufferUV) {
    764             mImageWidth = imageWidth;
    765             mImageHeight = imageHeight;
    766             ByteBuffer dupY = bufferY.duplicate();
    767             ByteBuffer dupUV = bufferUV.duplicate();
    768             mPlanes = new Plane[3];
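                     // plane 0 is Y; planes 1 and 2 are U and V, built either from the interleaved UV
                     // buffer (semi-planar) or from the separate U and V regions (planar)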
    769 
    770             int srcOffsetY = origin.x + origin.y * arrayWidth;
    771 
    772             mPlanes[0] = new YUVPlane(
    773                         mImageWidth, mImageHeight, arrayWidth, 1,
    774                         dupY, srcOffsetY);
    775 
    776             if (semiPlanar) {
    777                 int srcOffsetUV = origin.y / 2 * arrayWidth + origin.x / 2 * 2;
    778 
    779                 mPlanes[1] = new YUVPlane(
    780                         mImageWidth / 2, mImageHeight / 2, arrayWidth, 2,
    781                         dupUV, srcOffsetUV);
    782                 mPlanes[2] = new YUVPlane(
    783                         mImageWidth / 2, mImageHeight / 2, arrayWidth, 2,
    784                         dupUV, srcOffsetUV + 1);
    785             } else {
    786                 int srcOffsetU = origin.y / 2 * arrayWidth / 2 + origin.x / 2;
    787                 int srcOffsetV = srcOffsetU + arrayWidth / 2 * arrayHeight / 2;
    788 
    789                 mPlanes[1] = new YUVPlane(
    790                         mImageWidth / 2, mImageHeight / 2, arrayWidth / 2, 1,
    791                         dupUV, srcOffsetU);
    792                 mPlanes[2] = new YUVPlane(
    793                         mImageWidth / 2, mImageHeight / 2, arrayWidth / 2, 1,
    794                         dupUV, srcOffsetV);
    795             }
    796         }
    797 
    798         @Override
    799         public int getFormat() {
    800             return ImageFormat.YUV_420_888;
    801         }
    802 
    803         @Override
    804         public int getWidth() {
    805             return mImageWidth;
    806         }
    807 
    808         @Override
    809         public int getHeight() {
    810             return mImageHeight;
    811         }
    812 
    813         @Override
    814         public long getTimestamp() {
    815             return 0;
    816         }
    817 
    818         @Override
    819         public Plane[] getPlanes() {
    820             return mPlanes;
    821         }
    822 
    823         @Override
    824         public void close() {
    825             mPlanes[0] = null;
    826             mPlanes[1] = null;
    827             mPlanes[2] = null;
    828         }
    829 
    830         class YUVPlane extends CodecImage.Plane {
    831             private final int mRowStride;
    832             private final int mPixelStride;
    833             private final ByteBuffer mByteBuffer;
    834 
    835             YUVPlane(int w, int h, int rowStride, int pixelStride,
    836                     ByteBuffer buffer, int offset) {
    837                 mRowStride = rowStride;
    838                 mPixelStride = pixelStride;
    839 
    840                 // only safe to access length bytes starting from buffer[offset]
    841                 int length = (h - 1) * rowStride + (w - 1) * pixelStride + 1;
    842 
    843                 buffer.position(offset);
    844                 mByteBuffer = buffer.slice();
    845                 mByteBuffer.limit(length);
    846             }
    847 
    848             @Override
    849             public int getRowStride() {
    850                 return mRowStride;
    851             }
    852 
    853             @Override
    854             public int getPixelStride() {
    855                 return mPixelStride;
    856             }
    857 
    858             @Override
    859             public ByteBuffer getBuffer() {
    860                 return mByteBuffer;
    861             }
    862         }
    863     }
    864 
    865     /**
    866      * Fills input image for encoder from YUV buffers.
    867      * @return size of enqueued data.
    868      */
    869     private int queueInputImageEncoder(
    870             MediaCodec codec, Image image, int index, int frameCount, int flags) {
    871         assertTrue(image.getFormat() == ImageFormat.YUV_420_888);
    872 
    873 
    874         Point origin = getOrigin(frameCount);
    875 
    876         // Y color first
    877         CodecImage srcImage = new YUVImage(
    878                 origin,
    879                 mVideoWidth, mVideoHeight,
    880                 mBufferWidth, mBufferHeight,
    881                 isSrcSemiPlanar(),
    882                 mYDirectBuffer, mUVDirectBuffer);
    883 
    884         CodecUtils.copyFlexYUVImage(image, srcImage);
    885 
    886         int size = mVideoHeight * mVideoWidth * 3 / 2;
    887         long ptsUsec = computePresentationTime(frameCount);
    888 
    889         codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
    890         if (VERBOSE && (frameCount == 0)) {
    891             printByteArray("Y ", mYBuffer.array(), 0, 20);
    892             printByteArray("UV ", mUVBuffer.array(), 0, 20);
    893             printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
    894         }
    895         return size;
    896     }
    897 
    898     /**
    899      * Dequeue encoded data from output buffer and store for later usage.
    900      */
    901     private void dequeueOutputBufferEncoder(
    902             MediaCodec codec, ByteBuffer[] outputBuffers,
    903             int index, MediaCodec.BufferInfo info) {
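                 // copy the encoded data out of the codec buffer, since that buffer is recycled once released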
    904         ByteBuffer output = outputBuffers[index];
    905         int l = info.size;
    906         ByteBuffer copied = ByteBuffer.allocate(l);
    907         output.get(copied.array(), 0, l);
    908         BufferInfo savedInfo = new BufferInfo();
    909         savedInfo.set(0, l, info.presentationTimeUs, info.flags);
    910         mEncodedOutputBuffer.addLast(Pair.create(copied, savedInfo));
    911         codec.releaseOutputBuffer(index, false /* render */);
    912     }
    913 
    914     /**
     915      * Run decoder benchmarking with the encoded stream stored during the encoding phase.
     916      * @param decoderName decoder name
     917      * @param format format of media to decode
     918      * @return a length-2 array: [0] decoding time in ms, [1] RMS error of pixels; null on failure
    919      */
    920     private double[] runDecoder(String decoderName, MediaFormat format) {
    921         MediaCodec codec = null;
    922         try {
    923             codec = MediaCodec.createByCodecName(decoderName);
    924         } catch (IOException | NullPointerException e) {
    925             Log.i(TAG, "could not find decoder for " + format);
    926             return null;
    927         }
    928         codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
    929         codec.start();
    930         ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
    931 
    932         double totalErrorSquared = 0;
    933 
    934         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    935         boolean sawOutputEOS = false;
    936         int inputLeft = mEncodedOutputBuffer.size();
    937         int inputBufferCount = 0;
    938         int outFrameCount = 0;
    939         YUVValue expected = new YUVValue();
    940         YUVValue decoded = new YUVValue();
    941         long lastOutputTimeNs = 0;
    942         long start = System.currentTimeMillis();
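                 // feed the stored encoded buffers and drain decoded output, optionally sampling pixels
                 // for the RMS error check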
    943         while (!sawOutputEOS) {
    944             if (inputLeft > 0) {
    945                 int inputBufIndex = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US);
    946 
    947                 if (inputBufIndex >= 0) {
    948                     ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
    949                     dstBuf.clear();
    950                     ByteBuffer src = mEncodedOutputBuffer.get(inputBufferCount).first;
    951                     BufferInfo srcInfo = mEncodedOutputBuffer.get(inputBufferCount).second;
    952                     int writeSize = src.capacity();
    953                     dstBuf.put(src.array(), 0, writeSize);
    954 
    955                     int flags = srcInfo.flags;
    956                     if ((System.currentTimeMillis() - start) > mTestConfig.mMaxTimeMs) {
    957                         flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
    958                     }
    959 
    960                     codec.queueInputBuffer(
    961                             inputBufIndex,
    962                             0 /* offset */,
    963                             writeSize,
    964                             srcInfo.presentationTimeUs,
    965                             flags);
    966                     inputLeft --;
    967                     inputBufferCount ++;
    968                 }
    969             }
    970 
    971             int res = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US);
    972             if (res >= 0) {
    973                 int outputBufIndex = res;
    974 
     975                 // only do the YUV compare when the buffer size is non-zero (the EOS output may be empty)
    976                 if (info.size > 0) {
    977                     if (lastOutputTimeNs > 0) {
    978                         int pos = outFrameCount - 1;
    979                         if (pos < mDecoderFrameTimeDiff[mCurrentTestRound].length) {
    980                             long diff = System.nanoTime() - lastOutputTimeNs;
    981                             mDecoderFrameTimeDiff[mCurrentTestRound][pos] = diff;
    982                         }
    983                     }
    984                     lastOutputTimeNs = System.nanoTime();
    985 
    986                     if (mTestConfig.mTestPixels) {
    987                         Point origin = getOrigin(outFrameCount);
    988                         int i;
    989 
    990                         // if decoder supports planar or semiplanar, check output with
    991                         // ByteBuffer & Image each on half of the points
    992                         int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
    993                         if (!isDstFlexYUV()) {
    994                             pixelCheckPerFrame /= 2;
    995                             ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
    996                             if (VERBOSE && (outFrameCount == 0)) {
    997                                 printByteBuffer("Y ", buf, 0, 20);
    998                                 printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
    999                                 printByteBuffer("UV ", buf,
   1000                                         mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
   1001                             }
   1002                             for (i = 0; i < pixelCheckPerFrame; i++) {
   1003                                 int w = mRandom.nextInt(mVideoWidth);
   1004                                 int h = mRandom.nextInt(mVideoHeight);
   1005                                 getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
   1006                                 getPixelValuesFromOutputBuffer(buf, w, h, decoded);
   1007                                 if (VERBOSE) {
    1008                                     Log.i(TAG, outFrameCount + "-" + i + "-th round: ByteBuffer:"
   1009                                             + " expected "
   1010                                             + expected.mY + "," + expected.mU + "," + expected.mV
   1011                                             + " decoded "
   1012                                             + decoded.mY + "," + decoded.mU + "," + decoded.mV);
   1013                                 }
   1014                                 totalErrorSquared += expected.calcErrorSquared(decoded);
   1015                             }
   1016                         }
   1017 
   1018                         Image image = codec.getOutputImage(outputBufIndex);
   1019                         assertTrue(image != null);
   1020                         for (i = 0; i < pixelCheckPerFrame; i++) {
   1021                             int w = mRandom.nextInt(mVideoWidth);
   1022                             int h = mRandom.nextInt(mVideoHeight);
   1023                             getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
   1024                             getPixelValuesFromImage(image, w, h, decoded);
   1025                             if (VERBOSE) {
    1026                                 Log.i(TAG, outFrameCount + "-" + i + "-th round: FlexYUV:"
    1027                                         + " expected "
   1028                                         + expected.mY + "," + expected.mU + "," + expected.mV
   1029                                         + " decoded "
   1030                                         + decoded.mY + "," + decoded.mU + "," + decoded.mV);
   1031                             }
   1032                             totalErrorSquared += expected.calcErrorSquared(decoded);
   1033                         }
   1034                     }
   1035                     outFrameCount++;
   1036                 }
   1037                 codec.releaseOutputBuffer(outputBufIndex, false /* render */);
   1038                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
   1039                     Log.d(TAG, "saw output EOS.");
   1040                     sawOutputEOS = true;
   1041                 }
   1042             } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
   1043                 mDecOutputFormat = codec.getOutputFormat();
   1044                 Log.d(TAG, "output format has changed to " + mDecOutputFormat);
   1045                 int colorFormat = mDecOutputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
   1046                 if (colorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar
   1047                         || colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
   1048                     mDstColorFormat = colorFormat;
   1049                 } else {
   1050                     mDstColorFormat = CodecCapabilities.COLOR_FormatYUV420Flexible;
   1051                     Log.w(TAG, "output format changed to unsupported one " +
   1052                             Integer.toHexString(colorFormat) + ", using FlexYUV");
   1053                 }
   1054             }
   1055         }
   1056         long finish = System.currentTimeMillis();
   1057         int validDataNum = Math.min(outFrameCount - 1,
   1058                 mDecoderFrameTimeDiff[mCurrentTestRound].length);
   1059         mDecoderFrameTimeDiff[mCurrentTestRound] =
   1060                 Arrays.copyOf(mDecoderFrameTimeDiff[mCurrentTestRound], validDataNum);
   1061         codec.stop();
   1062         codec.release();
   1063         codec = null;
   1064 
   1065         // divide by 3 as sum is done for Y, U, V.
   1066         double errorRms = Math.sqrt(totalErrorSquared / PIXEL_CHECK_PER_FRAME / outFrameCount / 3);
   1067         double[] result = { (double) finish - start, errorRms };
   1068         return result;
   1069     }
   1070 
   1071     /**
    1072      *  Returns the origin within the reference YUV plane for the given frame count.
    1073      *  The synthetic scene moves by shifting this origin on every frame.
   1074      */
   1075     private Point getOrigin(int frameCount) {
   1076         if (frameCount < 100) {
   1077             return new Point(2 * frameCount, 0);
   1078         } else if (frameCount < 200) {
   1079             return new Point(200, (frameCount - 100) * 2);
   1080         } else {
   1081             if (frameCount > 300) { // for safety
   1082                 frameCount = 300;
   1083             }
   1084             return new Point(600 - frameCount * 2, 600 - frameCount * 2);
   1085         }
   1086     }
   1087 
   1088     /**
    1089      * Initialize the reference YUV plane.
    1090      * @param w should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than the video width
    1091      *          to allow movement
    1092      * @param h should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than the video height
    1093      *          to allow movement
   1096      */
   1097     private void initYUVPlane(int w, int h) {
   1098         int bufferSizeY = w * h;
   1099         mYBuffer = ByteBuffer.allocate(bufferSizeY);
   1100         mUVBuffer = ByteBuffer.allocate(bufferSizeY / 2);
   1101         mYDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY);
   1102         mUVDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY / 2);
   1103         mBufferWidth = w;
   1104         mBufferHeight = h;
   1105         final byte[] yArray = mYBuffer.array();
   1106         final byte[] uvArray = mUVBuffer.array();
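                 // Y: diagonal gradient clamped to the valid video range; U/V: row and column indices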
   1107         for (int i = 0; i < h; i++) {
   1108             for (int j = 0; j < w; j++) {
   1109                 yArray[i * w + j]  = clampY((i + j) & 0xff);
   1110             }
   1111         }
   1112         if (isSrcSemiPlanar()) {
   1113             for (int i = 0; i < h/2; i++) {
   1114                 for (int j = 0; j < w/2; j++) {
   1115                     uvArray[i * w + 2 * j]  = (byte) (i & 0xff);
   1116                     uvArray[i * w + 2 * j + 1]  = (byte) (j & 0xff);
   1117                 }
   1118             }
   1119         } else { // planar, U first, then V
   1120             int vOffset = bufferSizeY / 4;
   1121             for (int i = 0; i < h/2; i++) {
   1122                 for (int j = 0; j < w/2; j++) {
   1123                     uvArray[i * w/2 + j]  = (byte) (i & 0xff);
   1124                     uvArray[i * w/2 + vOffset + j]  = (byte) (j & 0xff);
   1125                 }
   1126             }
   1127         }
   1128         mYDirectBuffer.put(yArray);
   1129         mUVDirectBuffer.put(uvArray);
   1130         mYDirectBuffer.rewind();
   1131         mUVDirectBuffer.rewind();
   1132     }
   1133 
   1134     /**
    1135      * Class to store a pixel value in YUV.
   1137      */
   1138     public class YUVValue {
   1139         public byte mY;
   1140         public byte mU;
   1141         public byte mV;
   1142         public YUVValue() {
   1143         }
   1144 
   1145         public boolean equalTo(YUVValue other) {
   1146             return (mY == other.mY) && (mU == other.mU) && (mV == other.mV);
   1147         }
   1148 
   1149         public double calcErrorSquared(YUVValue other) {
   1150             double yDelta = mY - other.mY;
   1151             double uDelta = mU - other.mU;
   1152             double vDelta = mV - other.mV;
   1153             return yDelta * yDelta + uDelta * uDelta + vDelta * vDelta;
   1154         }
   1155     }
   1156 
   1157     /**
    1158      * Read YUV values at position (x, y) relative to the given origin (originX, originY).
    1159      * The whole reference data is already available in mYBuffer and mUVBuffer.
    1160      * @param result the result is returned via this object, to avoid creating / destroying too
    1161      *               many instances
   1162      */
   1163     private void getPixelValuesFromYUVBuffers(int originX, int originY, int x, int y,
   1164             YUVValue result) {
   1165         result.mY = mYBuffer.get((originY + y) * mBufferWidth + (originX + x));
   1166         if (isSrcSemiPlanar()) {
   1167             int index = (originY + y) / 2 * mBufferWidth + (originX + x) / 2 * 2;
   1168             //Log.d(TAG, "YUV " + originX + "," + originY + "," + x + "," + y + "," + index);
   1169             result.mU = mUVBuffer.get(index);
   1170             result.mV = mUVBuffer.get(index + 1);
   1171         } else {
   1172             int vOffset = mBufferWidth * mBufferHeight / 4;
   1173             int index = (originY + y) / 2 * mBufferWidth / 2 + (originX + x) / 2;
   1174             result.mU = mUVBuffer.get(index);
   1175             result.mV = mUVBuffer.get(vOffset + index);
   1176         }
   1177     }
   1178 
   1179     /**
    1180      * Read YUV pixels from the decoded output buffer at the given (x, y) position.
    1181      * The output buffer is composed of the Y plane followed by U/V data.
    1182      * @param result the result is returned via this object, to avoid creating / destroying too
    1183      *               many instances
   1184      */
   1185     private void getPixelValuesFromOutputBuffer(ByteBuffer buffer, int x, int y, YUVValue result) {
   1186         result.mY = buffer.get(y * mVideoWidth + x);
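                 // chroma data starts right after the full-size Y plane (mVideoWidth * mVideoHeight bytes)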
   1187         if (isDstSemiPlanar()) {
   1188             int index = mVideoWidth * mVideoHeight + y / 2 * mVideoWidth + x / 2 * 2;
   1189             //Log.d(TAG, "Decoded " + x + "," + y + "," + index);
   1190             result.mU = buffer.get(index);
   1191             result.mV = buffer.get(index + 1);
   1192         } else {
   1193             int vOffset = mVideoWidth * mVideoHeight / 4;
   1194             int index = mVideoWidth * mVideoHeight + y / 2 * mVideoWidth / 2 + x / 2;
   1195             result.mU = buffer.get(index);
   1196             result.mV = buffer.get(index + vOffset);
   1197         }
   1198     }
   1199 
   1200     private void getPixelValuesFromImage(Image image, int x, int y, YUVValue result) {
   1201         assertTrue(image.getFormat() == ImageFormat.YUV_420_888);
   1202 
   1203         Plane[] planes = image.getPlanes();
   1204         assertTrue(planes.length == 3);
   1205 
   1206         result.mY = getPixelFromPlane(planes[0], x, y);
   1207         result.mU = getPixelFromPlane(planes[1], x / 2, y / 2);
   1208         result.mV = getPixelFromPlane(planes[2], x / 2, y / 2);
   1209     }
   1210 
   1211     private byte getPixelFromPlane(Plane plane, int x, int y) {
   1212         ByteBuffer buf = plane.getBuffer();
   1213         return buf.get(y * plane.getRowStride() + x * plane.getPixelStride());
   1214     }
   1215 
   1216     /**
    1217      * Y cannot use the full 0-255 range in video; clamp it to keep the value valid.
   1218      */
   1219     private byte clampY(int y) {
   1220         if (y < Y_CLAMP_MIN) {
   1221             y = Y_CLAMP_MIN;
   1222         } else if (y > Y_CLAMP_MAX) {
   1223             y = Y_CLAMP_MAX;
   1224         }
   1225         return (byte) (y & 0xff);
   1226     }
   1227 
   1228     // for debugging
   1229     private void printByteArray(String msg, byte[] data, int offset, int len) {
   1230         StringBuilder builder = new StringBuilder();
   1231         builder.append(msg);
   1232         builder.append(":");
   1233         for (int i = offset; i < offset + len; i++) {
   1234             builder.append(Integer.toHexString(data[i]));
   1235             builder.append(",");
   1236         }
   1237         builder.deleteCharAt(builder.length() - 1);
   1238         Log.i(TAG, builder.toString());
   1239     }
   1240 
   1241     // for debugging
   1242     private void printByteBuffer(String msg, ByteBuffer data, int offset, int len) {
   1243         StringBuilder builder = new StringBuilder();
   1244         builder.append(msg);
   1245         builder.append(":");
   1246         for (int i = offset; i < offset + len; i++) {
   1247             builder.append(Integer.toHexString(data.get(i)));
   1248             builder.append(",");
   1249         }
   1250         builder.deleteCharAt(builder.length() - 1);
   1251         Log.i(TAG, builder.toString());
   1252     }
   1253 
   1254     /**
   1255      * Generates the presentation time for frame N, in microseconds.
   1256      */
   1257     private long computePresentationTime(int frameIndex) {
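                 // start at a fixed 132 us offset; frames are spaced 1,000,000 / mFrameRate microseconds apart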
   1258         return 132 + frameIndex * 1000000L / mFrameRate;
   1259     }
   1260 }
   1261