Home | History | Annotate | Download | only in cts
      1 /*
      2  * Copyright (C) 2016 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 package android.media.cts;
     17 
     18 import android.media.cts.R;
     19 
     20 import android.annotation.TargetApi;
     21 import android.app.Activity;
     22 import android.content.Context;
     23 import android.content.Intent;
     24 import android.content.pm.ActivityInfo;
     25 import android.content.res.Configuration;
     26 import android.content.res.Resources;
     27 import android.graphics.Bitmap;
     28 import android.graphics.Bitmap.Config;
     29 import android.graphics.BitmapFactory;
     30 import android.graphics.Color;
     31 import android.graphics.SurfaceTexture;
     32 import android.media.MediaCodec;
     33 import android.media.MediaCodec.BufferInfo;
     34 import android.media.MediaCodec.CodecException;
     35 import android.media.MediaExtractor;
     36 import android.media.MediaFormat;
     37 import android.net.Uri;
     38 import android.opengl.EGL14;
     39 import android.opengl.GLES11Ext;
     40 import android.opengl.GLES20;
     41 import android.opengl.GLSurfaceView;
     42 import android.os.Build;
     43 import android.os.Handler;
     44 import android.os.HandlerThread;
     45 import android.os.Looper;
     46 import android.os.SystemClock;
     47 import android.test.ActivityInstrumentationTestCase2;
     48 import android.util.Log;
     49 import android.util.Pair;
     50 import android.util.SparseArray;
     51 import android.view.PixelCopy;
     52 import android.view.PixelCopy.OnPixelCopyFinishedListener;
     53 import android.view.Surface;
     54 import android.view.SurfaceHolder;
     55 import android.view.SurfaceView;
     56 import android.view.TextureView;
     57 import android.view.View;
     58 import android.view.ViewGroup;
     59 import android.widget.RelativeLayout;
     60 
     61 import java.io.File;
     62 import java.io.FileOutputStream;
     63 import java.io.InputStream;
     64 import java.io.IOException;
     65 import java.nio.ByteBuffer;
     66 import java.nio.ByteOrder;
     67 import java.nio.FloatBuffer;
     68 import java.util.concurrent.TimeUnit;
     69 
     70 import javax.microedition.khronos.egl.EGL10;
     71 import javax.microedition.khronos.egl.EGLConfig;
     72 import javax.microedition.khronos.egl.EGLContext;
     73 import javax.microedition.khronos.egl.EGLDisplay;
     74 import javax.microedition.khronos.egl.EGLSurface;
     75 
     76 @TargetApi(16)
     77 public class DecodeAccuracyTestBase
     78     extends ActivityInstrumentationTestCase2<DecodeAccuracyTestActivity> {
     79 
     80     protected Context mContext;
     81     protected Resources mResources;
     82     protected DecodeAccuracyTestActivity mActivity;
     83     protected TestHelper testHelper;
     84 
    /** Binds this instrumentation test case to {@link DecodeAccuracyTestActivity}. */
    public DecodeAccuracyTestBase() {
        super(DecodeAccuracyTestActivity.class);
    }
     88 
    /**
     * Launches the test activity and caches the instrumentation context, resources,
     * and a {@link TestHelper} bound to that activity.
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mActivity = getActivity();
        // Let the activity finish launching before grabbing contexts from instrumentation.
        getInstrumentation().waitForIdleSync();
        mContext = getInstrumentation().getTargetContext();
        mResources = mContext.getResources();
        testHelper = new TestHelper(mContext, mActivity);
    }
     98 
    /** Drops the activity reference so it can be collected, then delegates cleanup. */
    @Override
    protected void tearDown() throws Exception {
        mActivity = null;
        super.tearDown();
    }
    104 
    /** Reorders the already-running test activity back to the foreground. */
    protected void bringActivityToFront() {
        Intent intent = new Intent(mContext, DecodeAccuracyTestActivity.class);
        // REORDER_TO_FRONT re-uses the existing instance instead of launching a new one.
        intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
        mActivity.startActivity(intent);
    }
    110 
    /** Returns the helper created in {@link #setUp()}. */
    protected TestHelper getHelper() {
        return testHelper;
    }
    114 
    /**
     * Fails the running test when {@code reference} is null; otherwise returns it unchanged.
     *
     * @param reference the value to check.
     * @return the same, non-null reference.
     */
    public static <T> T checkNotNull(T reference) {
        assertNotNull(reference);
        return reference;
    }
    119 
    120     public static class SimplePlayer {
    121 
    122         public static final long DECODE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(1) / 2;
    123 
    124         private static final int NO_TRACK_INDEX = -3;
    125         private static final long DEQUEUE_TIMEOUT_US = 20;
    126         private static final String TAG = SimplePlayer.class.getSimpleName();
    127 
    128         private final Context context;
    129         private final MediaExtractor extractor;
    130         private MediaCodec decoder;
    131 
        /** Creates a player backed by a fresh {@link MediaExtractor}. */
        public SimplePlayer(Context context) {
            this(context, new MediaExtractor());
        }
    135 
        /**
         * Creates a player using the supplied extractor.
         *
         * @param context used to resolve the video URI; must be non-null.
         * @param extractor pulls encoded samples from the video; must be non-null.
         */
        public SimplePlayer(Context context, MediaExtractor extractor) {
            this.context = checkNotNull(context);
            this.extractor = checkNotNull(extractor);
        }
    140 
    141         /*
    142          * The function play the corresponding file for certain number of frames,
    143          *
    144          * @param surface is the surface view of decoder output.
    145          * @param videoFormat is the format of the video to extract and decode.
    146          * @param numOfTotalFrame is the number of Frame wish to play.
    147          * @return a PlayerResult object indicating success or failure.
    148          */
    149         public PlayerResult decodeVideoFrames(
    150                 Surface surface, VideoFormat videoFormat, int numOfTotalFrames) {
    151             PlayerResult playerResult;
    152             if (prepare(surface, videoFormat)) {
    153                 if (startDecoder()) {
    154                     playerResult = decodeFramesAndDisplay(
    155                             surface, numOfTotalFrames, numOfTotalFrames * DECODE_TIMEOUT_MS);
    156                 } else {
    157                     playerResult = PlayerResult.failToStart();
    158                 }
    159             } else {
    160                 playerResult = new PlayerResult();
    161             }
    162             release();
    163             return new PlayerResult(playerResult);
    164         }
    165 
        /** Convenience overload: decodes {@code numOfTotalFrames} frames without rendering. */
        public PlayerResult decodeVideoFrames(VideoFormat videoFormat, int numOfTotalFrames) {
            return decodeVideoFrames(null, videoFormat, numOfTotalFrames);
        }
    169 
    170         /*
    171          * The function set up the extractor and decoder with proper format.
    172          * This must be called before decodeFramesAndDisplay.
    173          */
    174         private boolean prepare(Surface surface, VideoFormat videoFormat) {
    175             Log.i(TAG, "Preparing to decode the media file.");
    176             if (!setExtractorDataSource(videoFormat)) {
    177                 return false;
    178             }
    179             int trackNum = getFirstVideoTrackIndex(extractor);
    180             if (trackNum == NO_TRACK_INDEX) {
    181                 return false;
    182             }
    183             extractor.selectTrack(trackNum);
    184             MediaFormat mediaFormat = extractor.getTrackFormat(trackNum);
    185             configureFormat(mediaFormat, videoFormat);
    186             return configureDecoder(surface, mediaFormat);
    187         }
    188 
    189         /* The function decode video frames and display in a surface. */
    190         private PlayerResult decodeFramesAndDisplay(
    191                 Surface surface, int numOfTotalFrames, long timeOutMs) {
    192             Log.i(TAG, "Starting decoding.");
    193             checkNotNull(decoder);
    194             int numOfDecodedFrames = 0;
    195             long decodeStart = 0;
    196             boolean renderToSurface = surface != null ? true : false;
    197             BufferInfo info = new BufferInfo();
    198             ByteBuffer inputBuffer;
    199             ByteBuffer[] inputBufferArray = decoder.getInputBuffers();
    200             long loopStart = SystemClock.elapsedRealtime();
    201 
    202             while (numOfDecodedFrames < numOfTotalFrames
    203                     && (SystemClock.elapsedRealtime() - loopStart < timeOutMs)) {
    204                 try {
    205                     int inputBufferIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
    206                     if (inputBufferIndex >= 0) {
    207                         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
    208                             inputBuffer = inputBufferArray[inputBufferIndex];
    209                         } else {
    210                             inputBuffer = decoder.getInputBuffer(inputBufferIndex);
    211                         }
    212                         if (decodeStart == 0) {
    213                             decodeStart = SystemClock.elapsedRealtime();
    214                         }
    215                         int sampleSize = extractor.readSampleData(inputBuffer, 0);
    216                         if (sampleSize > 0) {
    217                             decoder.queueInputBuffer(
    218                                     inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0);
    219                             extractor.advance();
    220                         }
    221                     }
    222                     int decoderStatus = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
    223                     if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    224                         break;
    225                     }
    226                     if (decoderStatus >= 0 && info.size > 0) {
    227                         decoder.releaseOutputBuffer(decoderStatus, renderToSurface);
    228                         numOfDecodedFrames++;
    229                     }
    230                 } catch (IllegalStateException exception) {
    231                     Log.e(TAG, "IllegalStateException in decodeFramesAndDisplay", exception);
    232                     break;
    233                 }
    234             }
    235             long totalTime = SystemClock.elapsedRealtime() - decodeStart;
    236             Log.i(TAG, "Finishing decoding.");
    237             return new PlayerResult(true, true, numOfTotalFrames == numOfDecodedFrames, totalTime);
    238         }
    239 
        /** Releases the decoder and the extractor after a playback attempt. */
        private void release() {
            decoderRelease();
            extractorRelease();
        }
    244 
        /**
         * Points the extractor at the video described by {@code videoFormat}.
         *
         * @return false when the data source could not be opened.
         */
        private boolean setExtractorDataSource(VideoFormat videoFormat) {
            try {
                extractor.setDataSource(context, videoFormat.loadUri(context), null);
            } catch (IOException exception) {
                Log.e(TAG, "IOException in setDataSource", exception);
                return false;
            }
            return true;
        }
    254 
        /**
         * Creates a decoder for the format's MIME type and configures it with the output surface.
         *
         * @return true on success; false after logging and releasing the decoder on failure.
         */
        private boolean configureDecoder(Surface surface, MediaFormat mediaFormat) {
            try {
                decoder = MediaCodec.createDecoderByType(
                        mediaFormat.getString(MediaFormat.KEY_MIME));
                decoder.configure(mediaFormat, surface, null, 0);
            } catch (Exception exception) {
                if (exception instanceof IOException) {
                    Log.e(TAG, "IOException in createDecoderByType", exception);
                // CodecException only exists on LOLLIPOP+, so gate the instanceof on SDK level.
                } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
                        && exception instanceof CodecException) {
                    Log.e(TAG, "CodecException in createDecoderByType", exception);
                    // reset() returns the codec to its uninitialized state after an error.
                    decoder.reset();
                } else {
                    Log.e(TAG, "Unknown exception in createDecoderByType", exception);
                }
                decoderRelease();
                return false;
            }
            return true;
        }
    275 
        /**
         * Starts the configured decoder.
         *
         * @return true on success; false after logging and releasing the decoder on failure.
         */
        private boolean startDecoder() {
            try {
                decoder.start();
            } catch (Exception exception) {
                // CodecException only exists on LOLLIPOP+, so gate the instanceof on SDK level.
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
                        && exception instanceof CodecException) {
                    Log.e(TAG, "CodecException in startDecoder", exception);
                    decoder.reset();
                } else if (exception instanceof IllegalStateException) {
                    Log.e(TAG, "IllegalStateException in startDecoder", exception);
                } else {
                    Log.e(TAG, "Unknown exception in startDecoder", exception);
                }
                decoderRelease();
                return false;
            }
            return true;
        }
    294 
        /** Stops and releases the decoder, tolerating failures from partially-started codecs. */
        private void decoderRelease() {
            if (decoder == null) {
                return;
            }
            try {
                decoder.stop();
            } catch (IllegalStateException exception) {
                // IllegalStateException happens when decoder fail to start.
                Log.e(TAG, "IllegalStateException in decoder stop", exception);
            } finally {
                // Always attempt release so the codec instance is returned to the system.
                try {
                    decoder.release();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in decoder release", exception);
                }
            }
            decoder = null;
        }
    313 
        /**
         * Releases the extractor.
         * NOTE(review): extractor is final and null-checked in the constructor, so the
         * null guard here is purely defensive.
         */
        private void extractorRelease() {
            if (extractor == null) {
                return;
            }
            try {
                extractor.release();
            } catch (IllegalStateException exception) {
                Log.e(TAG, "IllegalStateException in extractor release", exception);
            }
        }
    324 
        /**
         * Syncs the test's VideoFormat with the track's MediaFormat: copies MIME and
         * dimensions into {@code videoFormat}, writes the dimensions back (a no-op unless
         * VideoFormat adjusts them — can't tell from here), and forwards optional
         * max-width/height hints for adaptive playback.
         */
        private static void configureFormat(MediaFormat mediaFormat, VideoFormat videoFormat) {
            checkNotNull(mediaFormat);
            checkNotNull(videoFormat);
            videoFormat.setMimeType(mediaFormat.getString(MediaFormat.KEY_MIME));
            videoFormat.setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH));
            videoFormat.setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT));
            mediaFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getWidth());
            mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getHeight());

            // Only hint max dimensions when the caller set both.
            if (videoFormat.getMaxWidth() != VideoFormat.UNSET
                    && videoFormat.getMaxHeight() != VideoFormat.UNSET) {
                mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, videoFormat.getMaxWidth());
                mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, videoFormat.getMaxHeight());
            }
        }
    340 
    341         /*
    342          * The function returns the first video track found.
    343          *
    344          * @param extractor is the media extractor instantiated with a video uri.
    345          * @return the index of the first video track if found, NO_TRACK_INDEX otherwise.
    346          */
    347         private static int getFirstVideoTrackIndex(MediaExtractor extractor) {
    348             for (int i = 0; i < extractor.getTrackCount(); i++) {
    349                 MediaFormat trackMediaFormat = extractor.getTrackFormat(i);
    350                 if (trackMediaFormat.getString(MediaFormat.KEY_MIME).startsWith("video/")) {
    351                     return i;
    352                 }
    353             }
    354             Log.e(TAG, "couldn't get a video track");
    355             return NO_TRACK_INDEX;
    356         }
    357 
    358         /* Stores the result from SimplePlayer. */
    359         public static final class PlayerResult {
    360 
    361             public static final int UNSET = -1;
    362             private final boolean configureSuccess;
    363             private final boolean startSuccess;
    364             private final boolean decodeSuccess;
    365             private final long totalTime;
    366 
    367             public PlayerResult(
    368                     boolean configureSuccess, boolean startSuccess,
    369                     boolean decodeSuccess, long totalTime) {
    370                 this.configureSuccess = configureSuccess;
    371                 this.startSuccess = startSuccess;
    372                 this.decodeSuccess = decodeSuccess;
    373                 this.totalTime = totalTime;
    374             }
    375 
    376             public PlayerResult(PlayerResult playerResult) {
    377                 this(playerResult.configureSuccess, playerResult.startSuccess,
    378                         playerResult.decodeSuccess, playerResult.totalTime);
    379             }
    380 
    381             public PlayerResult() {
    382                 // Dummy PlayerResult.
    383                 this(false, false, false, UNSET);
    384             }
    385 
    386             public static PlayerResult failToStart() {
    387                 return new PlayerResult(true, false, false, UNSET);
    388             }
    389 
    390             public boolean isConfigureSuccess() {
    391                 return configureSuccess;
    392             }
    393 
    394             public boolean isStartSuccess() {
    395                 return startSuccess;
    396             }
    397 
    398             public boolean isDecodeSuccess() {
    399                 return decodeSuccess;
    400             }
    401 
    402             public boolean isSuccess() {
    403                 return isConfigureSuccess() && isStartSuccess()
    404                         && isDecodeSuccess() && getTotalTime() != UNSET;
    405             }
    406 
    407             public long getTotalTime() {
    408                 return totalTime;
    409             }
    410 
    411             public boolean isFailureForAll() {
    412                 return (!isConfigureSuccess() && !isStartSuccess()
    413                         && !isDecodeSuccess() && getTotalTime() == UNSET);
    414             }
    415         }
    416 
    417     }
    418 
    419     /* Utility class for collecting common test case functionality. */
    420     class TestHelper {
    421 
    422         private final Context context;
    423         private final Handler handler;
    424         private final Activity activity;
    425 
        /**
         * @param context must be non-null; used for resources and configuration lookups.
         * @param activity hosts the views under test.
         *        NOTE(review): unlike context, activity is not null-checked here — confirm.
         */
        public TestHelper(Context context, Activity activity) {
            this.context = checkNotNull(context);
            // All view manipulation below is posted to the main (UI) thread.
            this.handler = new Handler(Looper.getMainLooper());
            this.activity = activity;
        }
    431 
        /** Decodes the given raw image resource into a Bitmap. */
        public Bitmap generateBitmapFromImageResourceId(int resourceId) {
            return BitmapFactory.decodeStream(context.getResources().openRawResource(resourceId));
        }
    435 
        /** Returns the context supplied at construction. */
        public Context getContext() {
            return context;
        }
    439 
    440         public void rotateOrientation() {
    441             handler.post(new Runnable() {
    442                 @Override
    443                 public void run() {
    444                     final int orientation = context.getResources().getConfiguration().orientation;
    445                     if (orientation == Configuration.ORIENTATION_PORTRAIT) {
    446                         activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
    447                     } else {
    448                         activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    449                     }
    450                 }
    451             });
    452         }
    453 
        /** Clears any requested orientation (system decides), posted to the UI thread. */
        public void unsetOrientation() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
                }
            });
        }
    462 
        /** Attaches {@code view} to the activity's attach_view container on the UI thread. */
        public void generateView(View view) {
            RelativeLayout relativeLayout =
                    (RelativeLayout) activity.findViewById(R.id.attach_view);
            ViewGenerator viewGenerator = new ViewGenerator(relativeLayout, view);
            handler.post(viewGenerator);
        }
    469 
        /** Detaches {@code view} from its parent on the UI thread. */
        public void cleanUpView(View view) {
            ViewCleaner viewCleaner = new ViewCleaner(view);
            handler.post(viewCleaner);
        }
    474 
    475         public synchronized Bitmap generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot) {
    476             handler.post(snapshot);
    477             try {
    478                 while (!snapshot.isBitmapReady()) {
    479                     Thread.sleep(100);
    480                 }
    481             } catch (InterruptedException e) {
    482                 e.printStackTrace();
    483             }
    484             return snapshot.getBitmap();
    485         }
    486 
        /** Runnable that sizes a view to the standard test dimensions and attaches it. */
        private class ViewGenerator implements Runnable {

            private final View view;
            private final RelativeLayout relativeLayout;

            public ViewGenerator(RelativeLayout relativeLayout, View view) {
                this.view = checkNotNull(view);
                this.relativeLayout = checkNotNull(relativeLayout);
            }

            @Override
            public void run() {
                // A view must be detached from any previous parent before re-attaching.
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
                RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
                        VideoViewFactory.VIEW_WIDTH, VideoViewFactory.VIEW_HEIGHT);
                view.setLayoutParams(params);
                relativeLayout.addView(view);
            }

        }
    509 
        /** Runnable that detaches a view from its parent, if any. */
        private class ViewCleaner implements Runnable {

            private final View view;

            public ViewCleaner(View view) {
                this.view = checkNotNull(view);
            }

            @Override
            public void run() {
                if (view.getParent() != null) {
                    ((ViewGroup) view.getParent()).removeView(view);
                }
            }

        }
    526 
    527     }
    528 
    529 }
    530 
    531 /* Factory for manipulating a {@link View}. */
    532 abstract class VideoViewFactory {
    533 
    534     public static final long VIEW_WAITTIME_MS = TimeUnit.SECONDS.toMillis(1);
    535     public static final long DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(3);
    536     public static final int VIEW_WIDTH = 480;
    537     public static final int VIEW_HEIGHT = 360;
    538 
    539     public VideoViewFactory() {}
    540 
    541     public abstract void release();
    542 
    543     public abstract String getName();
    544 
    545     public abstract View createView(Context context);
    546 
    547     public void waitForViewIsAvailable() throws Exception {
    548         waitForViewIsAvailable(DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS);
    549     };
    550 
    551     public abstract void waitForViewIsAvailable(long timeOutMs) throws Exception;
    552 
    553     public abstract Surface getSurface();
    554 
    555     public abstract VideoViewSnapshot getVideoViewSnapshot();
    556 
    557     public boolean hasLooper() {
    558         return Looper.myLooper() != null;
    559     }
    560 
    561 }
    562 
/* Factory for building a {@link TextureView}. */
@TargetApi(16)
class TextureViewFactory extends VideoViewFactory implements TextureView.SurfaceTextureListener {

    private static final String TAG = TextureViewFactory.class.getSimpleName();
    private static final String NAME = "TextureView";

    // Blocks waitForViewIsAvailable() until onSurfaceTextureAvailable() fires.
    private final Object syncToken = new Object();
    private TextureView textureView;

    public TextureViewFactory() {}

    /** Creates the TextureView and registers this factory for its availability callbacks. */
    @Override
    public TextureView createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        textureView = DecodeAccuracyTestBase.checkNotNull(new TextureView(context));
        textureView.setSurfaceTextureListener(this);
        return textureView;
    }

    @Override
    public void release() {
        textureView = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    /** Wraps the view's SurfaceTexture in a new Surface; the view must be available first. */
    @Override
    public Surface getSurface() {
        return new Surface(textureView.getSurfaceTexture());
    }

    @Override
    public TextureViewSnapshot getVideoViewSnapshot() {
        return new TextureViewSnapshot(textureView);
    }

    /**
     * Blocks until the TextureView is available or {@code timeOutMs} elapses. Each wait is
     * bounded by VIEW_WAITTIME_MS so availability is re-checked periodically.
     *
     * @throws InterruptedException if interrupted, or if the view never becomes available
     *         within the timeout.
     */
    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !textureView.isAvailable()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a TextureView to a window.", e);
                    // NOTE(review): rethrowing a fresh InterruptedException drops the original
                    // stack trace; only the message is preserved.
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!textureView.isAvailable()) {
            throw new InterruptedException("Taking too long to attach a TextureView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    /** Wakes any thread blocked in waitForViewIsAvailable(). */
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(
            SurfaceTexture surfaceTexture, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}

}
    642 
    643 /**
    644  * Factory for building a {@link SurfaceView}
    645  */
    646 @TargetApi(24)
    647 class SurfaceViewFactory extends VideoViewFactory implements SurfaceHolder.Callback {
    648 
    649     private static final String TAG = SurfaceViewFactory.class.getSimpleName();
    650     private static final String NAME = "SurfaceView";
    651 
    652     private final Object syncToken = new Object();
    653     private SurfaceViewSnapshot surfaceViewSnapshot;
    654     private SurfaceView surfaceView;
    655     private SurfaceHolder surfaceHolder;
    656 
    657     public SurfaceViewFactory() {}
    658 
    659     @Override
    660     public void release() {
    661         if (surfaceViewSnapshot != null) {
    662             surfaceViewSnapshot.release();
    663         }
    664         surfaceView = null;
    665         surfaceHolder = null;
    666     }
    667 
    668     @Override
    669     public String getName() {
    670         return NAME;
    671     }
    672 
    673     @Override
    674     public View createView(Context context) {
    675         Log.i(TAG, "Creating a " + NAME);
    676         if (!super.hasLooper()) {
    677             Looper.prepare();
    678         }
    679         surfaceView = new SurfaceView(context);
    680         surfaceHolder = surfaceView.getHolder();
    681         surfaceHolder.addCallback(this);
    682         return surfaceView;
    683     }
    684 
    685     @Override
    686     public void waitForViewIsAvailable(long timeOutMs) throws Exception {
    687         final long start = SystemClock.elapsedRealtime();
    688         while (SystemClock.elapsedRealtime() - start < timeOutMs && !getSurface().isValid()) {
    689             synchronized (syncToken) {
    690                 try {
    691                     syncToken.wait(VIEW_WAITTIME_MS);
    692                 } catch (InterruptedException e) {
    693                     Log.e(TAG, "Exception occurred when attaching a SurfaceView to a window.", e);
    694                     throw new InterruptedException(e.getMessage());
    695                 }
    696             }
    697         }
    698         if (!getSurface().isValid()) {
    699             throw new InterruptedException("Taking too long to attach a SurfaceView to a window.");
    700         }
    701         Log.i(TAG, NAME + " is available.");
    702     }
    703 
    704     @Override
    705     public Surface getSurface() {
    706         return surfaceHolder.getSurface();
    707     }
    708 
    709     @Override
    710     public VideoViewSnapshot getVideoViewSnapshot() {
    711         surfaceViewSnapshot = new SurfaceViewSnapshot(surfaceView, VIEW_WIDTH, VIEW_HEIGHT);
    712         return surfaceViewSnapshot;
    713     }
    714 
    715     @Override
    716     public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
    717 
    718     @Override
    719     public void surfaceCreated(SurfaceHolder holder) {
    720         synchronized (syncToken) {
    721             syncToken.notify();
    722         }
    723     }
    724 
    725     @Override
    726     public void surfaceDestroyed(SurfaceHolder holder) {}
    727 
    728 }
    729 
    730 /**
    731  * Factory for building EGL and GLES that could render to GLSurfaceView.
    732  * {@link GLSurfaceView} {@link EGL10} {@link GLES20}.
    733  */
    734 @TargetApi(16)
    735 class GLSurfaceViewFactory extends VideoViewFactory {
    736 
    737     private static final String TAG = GLSurfaceViewFactory.class.getSimpleName();
    738     private static final String NAME = "GLSurfaceView";
    739 
    740     private final Object surfaceSyncToken = new Object();
    741 
    742     private GLSurfaceViewThread glSurfaceViewThread;
    743     private boolean byteBufferIsReady = false;
    744 
    745     public GLSurfaceViewFactory() {}
    746 
    747     @Override
    748     public void release() {
    749         glSurfaceViewThread.release();
    750         glSurfaceViewThread = null;
    751     }
    752 
    753     @Override
    754     public String getName() {
    755         return NAME;
    756     }
    757 
    758     @Override
    759     public View createView(Context context) {
    760         Log.i(TAG, "Creating a " + NAME);
    761         // Do all GL rendering in the GL thread.
    762         glSurfaceViewThread = new GLSurfaceViewThread();
    763         glSurfaceViewThread.start();
    764         // No necessary view to display, return null.
    765         return null;
    766     }
    767 
    768     @Override
    769     public void waitForViewIsAvailable(long timeOutMs) throws Exception {
    770         final long start = SystemClock.elapsedRealtime();
    771         while (SystemClock.elapsedRealtime() - start < timeOutMs
    772                 && glSurfaceViewThread.getSurface() == null) {
    773             synchronized (surfaceSyncToken) {
    774                 try {
    775                     surfaceSyncToken.wait(VIEW_WAITTIME_MS);
    776                 } catch (InterruptedException e) {
    777                     Log.e(TAG, "Exception occurred when waiting for the surface from"
    778                             + " GLSurfaceView to become available.", e);
    779                     throw new InterruptedException(e.getMessage());
    780                 }
    781             }
    782         }
    783         if (glSurfaceViewThread.getSurface() == null) {
    784             throw new InterruptedException("Taking too long for the surface from"
    785                     + " GLSurfaceView to become available.");
    786         }
    787         Log.i(TAG, NAME + " is available.");
    788     }
    789 
    790     @Override
    791     public Surface getSurface() {
    792         return glSurfaceViewThread.getSurface();
    793     }
    794 
    795     @Override
    796     public VideoViewSnapshot getVideoViewSnapshot() {
    797         return new GLSurfaceViewSnapshot(this, VIEW_WIDTH, VIEW_HEIGHT);
    798     }
    799 
    800     public boolean byteBufferIsReady() {
    801         return byteBufferIsReady;
    802     }
    803 
    804     public ByteBuffer getByteBuffer() {
    805         return glSurfaceViewThread.getByteBuffer();
    806     }
    807 
    808     /* Does all GL operations. */
    809     private class GLSurfaceViewThread extends Thread
    810             implements SurfaceTexture.OnFrameAvailableListener {
    811 
    812         private static final int FLOAT_SIZE_BYTES = 4;
    813         private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    814         private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    815         private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
    816         private FloatBuffer triangleVertices;
    817         private float[] textureTransform = new float[16];
    818 
    819         private float[] triangleVerticesData = {
    820                 // X, Y, Z, U, V
    821                 -1f, -1f,  0f,  0f,  1f,
    822                  1f, -1f,  0f,  1f,  1f,
    823                 -1f,  1f,  0f,  0f,  0f,
    824                  1f,  1f,  0f,  1f,  0f,
    825         };
    826         // Make the top-left corner corresponds to texture coordinate
    827         // (0, 0). This complies with the transformation matrix obtained from
    828         // SurfaceTexture.getTransformMatrix.
    829 
    830         private static final String VERTEX_SHADER =
    831                 "attribute vec4 aPosition;\n"
    832                 + "attribute vec4 aTextureCoord;\n"
    833                 + "uniform mat4 uTextureTransform;\n"
    834                 + "varying vec2 vTextureCoord;\n"
    835                 + "void main() {\n"
    836                 + "    gl_Position = aPosition;\n"
    837                 + "    vTextureCoord = (uTextureTransform * aTextureCoord).xy;\n"
    838                 + "}\n";
    839 
    840         private static final String FRAGMENT_SHADER =
    841                 "#extension GL_OES_EGL_image_external : require\n"
    842                 + "precision mediump float;\n"      // highp here doesn't seem to matter
    843                 + "varying vec2 vTextureCoord;\n"
    844                 + "uniform samplerExternalOES sTexture;\n"
    845                 + "void main() {\n"
    846                 + "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
    847                 + "}\n";
    848 
    849         private int glProgram;
    850         private int textureID = -1;
    851         private int aPositionHandle;
    852         private int aTextureHandle;
    853         private int uTextureTransformHandle;
    854         private EGLDisplay eglDisplay = null;
    855         private EGLContext eglContext = null;
    856         private EGLSurface eglSurface = null;
    857         private EGL10 egl10;
    858         private Surface surface = null;
    859         private SurfaceTexture surfaceTexture;
    860         private ByteBuffer byteBuffer;
    861 
    862         public GLSurfaceViewThread() {}
    863 
    864         @Override
    865         public void run() {
    866             Looper.prepare();
    867             triangleVertices = ByteBuffer
    868                     .allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
    869                             .order(ByteOrder.nativeOrder()).asFloatBuffer();
    870             triangleVertices.put(triangleVerticesData).position(0);
    871 
    872             eglSetup();
    873             makeCurrent();
    874             eglSurfaceCreated();
    875 
    876             surfaceTexture = new SurfaceTexture(getTextureId());
    877             surfaceTexture.setOnFrameAvailableListener(this);
    878             surface = new Surface(surfaceTexture);
    879             synchronized (surfaceSyncToken) {
    880                 surfaceSyncToken.notify();
    881             }
    882             // Store pixels from surface
    883             byteBuffer = ByteBuffer.allocateDirect(VIEW_WIDTH * VIEW_HEIGHT * 4);
    884             byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
    885             Looper.loop();
    886         }
    887 
    888         @Override
    889         public void onFrameAvailable(SurfaceTexture st) {
    890             checkGlError("before updateTexImage");
    891             surfaceTexture.updateTexImage();
    892             st.getTransformMatrix(textureTransform);
    893             drawFrame();
    894             saveFrame();
    895         }
    896 
    897         /* Prepares EGL to use GLES 2.0 context and a surface that supports pbuffer. */
    898         public void eglSetup() {
    899             egl10 = (EGL10) EGLContext.getEGL();
    900             eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
    901             if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
    902                 throw new RuntimeException("unable to get egl10 display");
    903             }
    904             int[] version = new int[2];
    905             if (!egl10.eglInitialize(eglDisplay, version)) {
    906                 eglDisplay = null;
    907                 throw new RuntimeException("unable to initialize egl10");
    908             }
    909             // Configure EGL for pbuffer and OpenGL ES 2.0, 24-bit RGB.
    910             int[] configAttribs = {
    911                     EGL10.EGL_RED_SIZE, 8,
    912                     EGL10.EGL_GREEN_SIZE, 8,
    913                     EGL10.EGL_BLUE_SIZE, 8,
    914                     EGL10.EGL_ALPHA_SIZE, 8,
    915                     EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
    916                     EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
    917                     EGL10.EGL_NONE
    918             };
    919             EGLConfig[] configs = new EGLConfig[1];
    920             int[] numConfigs = new int[1];
    921             if (!egl10.eglChooseConfig(
    922                     eglDisplay, configAttribs, configs, configs.length, numConfigs)) {
    923                 throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
    924             }
    925             // Configure EGL context for OpenGL ES 2.0.
    926             int[] contextAttribs = {
    927                     EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
    928                     EGL10.EGL_NONE
    929             };
    930             eglContext = egl10.eglCreateContext(
    931                     eglDisplay, configs[0], EGL10.EGL_NO_CONTEXT, contextAttribs);
    932             checkEglError("eglCreateContext");
    933             if (eglContext == null) {
    934                 throw new RuntimeException("null context");
    935             }
    936             // Create a pbuffer surface.
    937             int[] surfaceAttribs = {
    938                     EGL10.EGL_WIDTH, VIEW_WIDTH,
    939                     EGL10.EGL_HEIGHT, VIEW_HEIGHT,
    940                     EGL10.EGL_NONE
    941             };
    942             eglSurface = egl10.eglCreatePbufferSurface(eglDisplay, configs[0], surfaceAttribs);
    943             checkEglError("eglCreatePbufferSurface");
    944             if (eglSurface == null) {
    945                 throw new RuntimeException("surface was null");
    946             }
    947         }
    948 
    949         public void release() {
    950             if (eglDisplay != EGL10.EGL_NO_DISPLAY) {
    951                 egl10.eglMakeCurrent(eglDisplay,
    952                         EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
    953                 egl10.eglDestroySurface(eglDisplay, eglSurface);
    954                 egl10.eglDestroyContext(eglDisplay, eglContext);
    955                 egl10.eglTerminate(eglDisplay);
    956             }
    957             eglDisplay = EGL10.EGL_NO_DISPLAY;
    958             eglContext = EGL10.EGL_NO_CONTEXT;
    959             eglSurface = EGL10.EGL_NO_SURFACE;
    960             surface.release();
    961             surfaceTexture.release();
    962         }
    963 
    964         /* Makes our EGL context and surface current. */
    965         public void makeCurrent() {
    966             if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
    967                 throw new RuntimeException("eglMakeCurrent failed");
    968             }
    969             checkEglError("eglMakeCurrent");
    970         }
    971 
    972         /* Call this after the EGL Surface is created and made current. */
    973         public void eglSurfaceCreated() {
    974             glProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
    975             if (glProgram == 0) {
    976                 throw new RuntimeException("failed creating program");
    977             }
    978             aPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
    979             checkLocation(aPositionHandle, "aPosition");
    980             aTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
    981             checkLocation(aTextureHandle, "aTextureCoord");
    982             uTextureTransformHandle = GLES20.glGetUniformLocation(glProgram, "uTextureTransform");
    983             checkLocation(uTextureTransformHandle, "uTextureTransform");
    984 
    985             int[] textures = new int[1];
    986             GLES20.glGenTextures(1, textures, 0);
    987             checkGlError("glGenTextures");
    988             textureID = textures[0];
    989             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
    990             checkGlError("glBindTexture");
    991 
    992             GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
    993                     GLES20.GL_LINEAR);
    994             GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
    995                     GLES20.GL_LINEAR);
    996             GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
    997                     GLES20.GL_CLAMP_TO_EDGE);
    998             GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
    999                     GLES20.GL_CLAMP_TO_EDGE);
   1000             checkGlError("glTexParameter");
   1001         }
   1002 
   1003         public void drawFrame() {
   1004             GLES20.glUseProgram(glProgram);
   1005             checkGlError("glUseProgram");
   1006             GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
   1007             checkGlError("glActiveTexture");
   1008             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
   1009             checkGlError("glBindTexture");
   1010 
   1011             triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
   1012             GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
   1013                     TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
   1014             checkGlError("glVertexAttribPointer aPositionHandle");
   1015             GLES20.glEnableVertexAttribArray(aPositionHandle);
   1016             checkGlError("glEnableVertexAttribArray aPositionHandle");
   1017 
   1018             triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
   1019             GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
   1020                     TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
   1021             checkGlError("glVertexAttribPointer aTextureHandle");
   1022             GLES20.glEnableVertexAttribArray(aTextureHandle);
   1023             checkGlError("glEnableVertexAttribArray aTextureHandle");
   1024 
   1025             GLES20.glUniformMatrix4fv(uTextureTransformHandle, 1, false,
   1026                     textureTransform, 0);
   1027             checkGlError("glUniformMatrix uTextureTransformHandle");
   1028 
   1029             GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
   1030             checkGlError("glDrawArrays");
   1031             GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
   1032         }
   1033 
   1034         /* Reads the pixels to a ByteBuffer. */
   1035         public void saveFrame() {
   1036             byteBufferIsReady = false;
   1037             byteBuffer.clear();
   1038             GLES20.glReadPixels(0, 0, VIEW_WIDTH, VIEW_HEIGHT, GLES20.GL_RGBA,
   1039                     GLES20.GL_UNSIGNED_BYTE, byteBuffer);
   1040             byteBufferIsReady = true;
   1041         }
   1042 
   1043         public int getTextureId() {
   1044             return textureID;
   1045         }
   1046 
   1047         public Surface getSurface() {
   1048             return surface;
   1049         }
   1050 
   1051         public ByteBuffer getByteBuffer() {
   1052             return byteBuffer;
   1053         }
   1054 
   1055         private int loadShader(int shaderType, String source) {
   1056             int shader = GLES20.glCreateShader(shaderType);
   1057             checkGlError("glCreateShader type=" + shaderType);
   1058             GLES20.glShaderSource(shader, source);
   1059             GLES20.glCompileShader(shader);
   1060             int[] compiled = new int[1];
   1061             GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
   1062 
   1063             if (compiled[0] == 0) {
   1064                 Log.e(TAG, "Could not compile shader " + shaderType + ":");
   1065                 Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
   1066                 GLES20.glDeleteShader(shader);
   1067                 shader = 0;
   1068             }
   1069             return shader;
   1070         }
   1071 
   1072         private int createProgram(String vertexSource, String fragmentSource) {
   1073             int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
   1074             if (vertexShader == 0) {
   1075                 return 0;
   1076             }
   1077             int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
   1078             if (pixelShader == 0) {
   1079                 return 0;
   1080             }
   1081             int program = GLES20.glCreateProgram();
   1082             if (program == 0) {
   1083                 Log.e(TAG, "Could not create program");
   1084             }
   1085             GLES20.glAttachShader(program, vertexShader);
   1086             checkGlError("glAttachShader");
   1087             GLES20.glAttachShader(program, pixelShader);
   1088             checkGlError("glAttachShader");
   1089             GLES20.glLinkProgram(program);
   1090             int[] linkStatus = new int[1];
   1091             GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
   1092 
   1093             if (linkStatus[0] != GLES20.GL_TRUE) {
   1094                 Log.e(TAG, "Could not link program: ");
   1095                 Log.e(TAG, GLES20.glGetProgramInfoLog(program));
   1096                 GLES20.glDeleteProgram(program);
   1097                 program = 0;
   1098             }
   1099             return program;
   1100         }
   1101 
   1102         private void checkEglError(String msg) {
   1103             int error;
   1104             if ((error = egl10.eglGetError()) != EGL10.EGL_SUCCESS) {
   1105                 throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
   1106             }
   1107         }
   1108 
   1109         public void checkGlError(String op) {
   1110             int error;
   1111             if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
   1112                 Log.e(TAG, op + ": glError " + error);
   1113                 throw new RuntimeException(op + ": glError " + error);
   1114             }
   1115         }
   1116 
   1117         public void checkLocation(int location, String label) {
   1118             if (location < 0) {
   1119                 throw new RuntimeException("Unable to locate '" + label + "' in program");
   1120             }
   1121         }
   1122     }
   1123 
   1124 }
   1125 
   1126 /* Definition of a VideoViewSnapshot and a runnable to get a bitmap from a view. */
abstract class VideoViewSnapshot implements Runnable {

    // Returns the captured bitmap; may be null until isBitmapReady() reports true.
    public abstract Bitmap getBitmap();

    // Whether the bitmap capture has completed and getBitmap() returns a usable result.
    public abstract boolean isBitmapReady();

}
   1134 
   1135 /* Runnable to get a bitmap from a texture view on the UI thread via a handler. */
   1136 class TextureViewSnapshot extends VideoViewSnapshot {
   1137 
   1138     private final TextureView tv;
   1139     private Bitmap bitmap = null;
   1140 
   1141     public TextureViewSnapshot(TextureView tv) {
   1142         this.tv = DecodeAccuracyTestBase.checkNotNull(tv);
   1143     }
   1144 
   1145     @Override
   1146     public synchronized void run() {
   1147         bitmap = tv.getBitmap();
   1148     }
   1149 
   1150     @Override
   1151     public Bitmap getBitmap() {
   1152         return bitmap;
   1153     }
   1154 
   1155     @Override
   1156     public boolean isBitmapReady() {
   1157         return bitmap != null;
   1158     }
   1159 
   1160 }
   1161 
   1162 /**
   1163  * Method to get bitmap of a {@link SurfaceView}.
   1164  */
   1165 class SurfaceViewSnapshot extends VideoViewSnapshot  {
   1166 
   1167     private static final String TAG = SurfaceViewSnapshot.class.getSimpleName();
   1168     private static final int PIXELCOPY_REQUEST_SLEEP_MS = 30;
   1169     private static final int PIXELCOPY_REQUEST_MAX_ATTEMPTS = 20;
   1170     private static final int PIXELCOPY_TIMEOUT_MS = 1000;
   1171 
   1172     private final Thread copyThread;
   1173     private Bitmap bitmap;
   1174     private int copyResult;
   1175 
   1176     public SurfaceViewSnapshot(final SurfaceView surfaceView, final int width, final int height) {
   1177         this.copyResult = -1;
   1178         this.copyThread = new Thread(new Runnable() {
   1179             @Override
   1180             public void run() {
   1181                 SynchronousPixelCopy copyHelper = new SynchronousPixelCopy();
   1182                 bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
   1183                 try {
   1184                     // Wait for SurfaceView to be available.
   1185                     for (int i = 0; i < PIXELCOPY_REQUEST_MAX_ATTEMPTS; i++) {
   1186                         copyResult = copyHelper.request(surfaceView, bitmap);
   1187                         if (copyResult == PixelCopy.SUCCESS) {
   1188                             break;
   1189                         }
   1190                         Thread.sleep(PIXELCOPY_REQUEST_SLEEP_MS);
   1191                     }
   1192                 } catch (InterruptedException e) {
   1193                     Log.w(TAG, "Pixel Copy is stopped/interrupted before it finishes.", e);
   1194                 }
   1195                 copyHelper.release();
   1196             }
   1197         });
   1198         copyThread.start();
   1199     }
   1200 
   1201     @Override
   1202     public synchronized void run() {}
   1203 
   1204     @Override
   1205     public Bitmap getBitmap() {
   1206         return bitmap;
   1207     }
   1208 
   1209     @Override
   1210     public boolean isBitmapReady() {
   1211         return copyResult == PixelCopy.SUCCESS;
   1212     }
   1213 
   1214     public void release() {
   1215         if (copyThread.isAlive()) {
   1216             copyThread.interrupt();
   1217         }
   1218     }
   1219 
   1220     private static class SynchronousPixelCopy implements OnPixelCopyFinishedListener {
   1221 
   1222         private final Handler handler;
   1223         private final HandlerThread thread;
   1224 
   1225         private int status = -1;
   1226 
   1227         public SynchronousPixelCopy() {
   1228             this.thread = new HandlerThread("PixelCopyHelper");
   1229             thread.start();
   1230             this.handler = new Handler(thread.getLooper());
   1231         }
   1232 
   1233         public void release() {
   1234             thread.quit();
   1235         }
   1236 
   1237         public int request(SurfaceView source, Bitmap dest) {
   1238             synchronized (this) {
   1239                 try {
   1240                     PixelCopy.request(source, dest, this, handler);
   1241                     return getResultLocked();
   1242                 } catch (Exception e) {
   1243                     Log.e(TAG, "Exception occurred when copying a SurfaceView.", e);
   1244                     return -1;
   1245                 }
   1246             }
   1247         }
   1248 
   1249         private int getResultLocked() {
   1250             try {
   1251                 this.wait(PIXELCOPY_TIMEOUT_MS);
   1252             } catch (InterruptedException e) { /* PixelCopy request didn't complete within 1s */ }
   1253             return status;
   1254         }
   1255 
   1256         @Override
   1257         public void onPixelCopyFinished(int copyResult) {
   1258             synchronized (this) {
   1259                 status = copyResult;
   1260                 this.notify();
   1261             }
   1262         }
   1263 
   1264     }
   1265 
   1266 }
   1267 
   1268 /**
   1269  * Runnable to get a bitmap from a GLSurfaceView on the UI thread via a handler.
   1270  * Note, because of how the bitmap is captured in GLSurfaceView,
   1271  * this method does not have to be a runnable.
   1272  */
   1273 class GLSurfaceViewSnapshot extends VideoViewSnapshot {
   1274 
   1275     private static final String TAG = GLSurfaceViewSnapshot.class.getSimpleName();
   1276     private static final int GET_BYTEBUFFER_SLEEP_MS = 30;
   1277     private static final int GET_BYTEBUFFER_MAX_ATTEMPTS = 20;
   1278 
   1279     private final GLSurfaceViewFactory glSurfaceViewFactory;
   1280     private final int width;
   1281     private final int height;
   1282 
   1283     private Bitmap bitmap = null;
   1284     private boolean bitmapIsReady = false;
   1285 
   1286     public GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height) {
   1287         this.glSurfaceViewFactory = DecodeAccuracyTestBase.checkNotNull(glSurfaceViewFactory);
   1288         this.width = width;
   1289         this.height = height;
   1290     }
   1291 
   1292     @Override
   1293     public synchronized void run() {
   1294         try {
   1295             waitForByteBuffer();
   1296         } catch (InterruptedException e) {
   1297             Log.w(TAG, e.getMessage());
   1298             Log.w(TAG, "ByteBuffer may contain incorrect pixels.");
   1299         }
   1300         // Get ByteBuffer anyway. Let the test fail if ByteBuffer contains incorrect pixels.
   1301         ByteBuffer byteBuffer = glSurfaceViewFactory.getByteBuffer();
   1302         bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
   1303         byteBuffer.rewind();
   1304         bitmap.copyPixelsFromBuffer(byteBuffer);
   1305         bitmapIsReady = true;
   1306     }
   1307 
   1308     @Override
   1309     public Bitmap getBitmap() {
   1310         return bitmap;
   1311     }
   1312 
   1313     @Override
   1314     public boolean isBitmapReady() {
   1315         return bitmapIsReady;
   1316     }
   1317 
   1318     public void waitForByteBuffer() throws InterruptedException {
   1319         // Wait for byte buffer to be ready.
   1320         for (int i = 0; i < GET_BYTEBUFFER_MAX_ATTEMPTS; i++) {
   1321             if (glSurfaceViewFactory.byteBufferIsReady()) {
   1322                 return;
   1323             }
   1324             Thread.sleep(GET_BYTEBUFFER_SLEEP_MS);
   1325         }
   1326         throw new InterruptedException("Taking too long to read pixels into a ByteBuffer.");
   1327     }
   1328 
   1329 }
   1330 
   1331 /* Stores information of a video. */
   1332 class VideoFormat {
   1333 
   1334     public static final int UNSET = -1;
   1335     public static final String MIMETYPE_UNSET = "UNSET";
   1336     public static final String MIMETYPE_KEY = "mimeType";
   1337     public static final String WIDTH_KEY = "width";
   1338     public static final String HEIGHT_KEY = "height";
   1339     public static final String FRAMERATE_KEY = "frameRate";
   1340 
   1341     private final String filename;
   1342     private Uri uri;
   1343     private String mimeType = MIMETYPE_UNSET;
   1344     private int width = UNSET;
   1345     private int height = UNSET;
   1346     private int maxWidth = UNSET;
   1347     private int maxHeight = UNSET;
   1348     private int originalWidth = UNSET;
   1349     private int originalHeight = UNSET;
   1350 
   1351     public VideoFormat(String filename, Uri uri) {
   1352         this.filename = filename;
   1353         this.uri = uri;
   1354     }
   1355 
   1356     public VideoFormat(String filename) {
   1357         this(filename, null);
   1358     }
   1359 
   1360     public VideoFormat(VideoFormat videoFormat) {
   1361         this(videoFormat.filename, videoFormat.uri);
   1362     }
   1363 
   1364     public Uri loadUri(Context context) {
   1365         uri = createCacheFile(context);
   1366         return uri;
   1367     }
   1368 
   1369     public Uri getUri() {
   1370         return uri;
   1371     }
   1372 
   1373     public String getFilename() {
   1374         return filename;
   1375     }
   1376 
   1377     public void setMimeType(String mimeType) {
   1378         this.mimeType = mimeType;
   1379     }
   1380 
   1381     public String getMimeType() {
   1382         return mimeType;
   1383     }
   1384 
   1385     public void setWidth(int width) {
   1386         this.width = width;
   1387         if (this.originalWidth == UNSET) {
   1388             this.originalWidth = width;
   1389         }
   1390     }
   1391 
   1392     public void setMaxWidth(int maxWidth) {
   1393         this.maxWidth = maxWidth;
   1394     }
   1395 
   1396     public int getWidth() {
   1397         return width;
   1398     }
   1399 
   1400     public int getMaxWidth() {
   1401         return maxWidth;
   1402     }
   1403 
   1404     public int getOriginalWidth() {
   1405         return originalWidth;
   1406     }
   1407 
   1408     public void setHeight(int height) {
   1409         this.height = height;
   1410         if (this.originalHeight == UNSET) {
   1411             this.originalHeight = height;
   1412         }
   1413     }
   1414 
   1415     public void setMaxHeight(int maxHeight) {
   1416         this.maxHeight = maxHeight;
   1417     }
   1418 
   1419     public int getHeight() {
   1420         return height;
   1421     }
   1422 
   1423     public int getMaxHeight() {
   1424         return maxHeight;
   1425     }
   1426 
   1427     public int getOriginalHeight() {
   1428         return originalHeight;
   1429     }
   1430 
   1431     private Uri createCacheFile(Context context) {
   1432         try {
   1433             File cacheFile = new File(context.getCacheDir(), filename);
   1434             if (cacheFile.createNewFile() == false) {
   1435                 cacheFile.delete();
   1436                 cacheFile.createNewFile();
   1437             }
   1438             InputStream inputStream = context.getAssets().open(filename);
   1439             FileOutputStream fileOutputStream = new FileOutputStream(cacheFile);
   1440             final int bufferSize = 1024 * 512;
   1441             byte[] buffer = new byte[bufferSize];
   1442 
   1443             while (inputStream.read(buffer) != -1) {
   1444                 fileOutputStream.write(buffer, 0, bufferSize);
   1445             }
   1446             fileOutputStream.close();
   1447             inputStream.close();
   1448             return Uri.fromFile(cacheFile);
   1449         } catch (IOException e) {
   1450             e.printStackTrace();
   1451             return null;
   1452         }
   1453     }
   1454 
   1455 }
   1456 
   1457 /**
   1458  * Compares bitmaps to determine if they are similar.
   1459  *
   1460  * <p>To determine greatest pixel difference we transform each pixel into the
   1461  * CIE L*a*b* color space. The euclidean distance formula is used to determine pixel differences.
   1462  */
   1463 class BitmapCompare {
   1464 
   1465     private static final int RED = 0;
   1466     private static final int GREEN = 1;
   1467     private static final int BLUE = 2;
   1468     private static final int X = 0;
   1469     private static final int Y = 1;
   1470     private static final int Z = 2;
   1471 
   1472     private static SparseArray<double[]> pixelTransformCache = new SparseArray<>();
   1473 
   1474     private BitmapCompare() {}
   1475 
   1476     /**
   1477      * Produces greatest pixel between two bitmaps. Used to determine bitmap similarity.
   1478      *
   1479      * @param bitmap1 A bitmap to compare to bitmap2.
   1480      * @param bitmap2 A bitmap to compare to bitmap1.
   1481      * @return A {@link Difference} with an integer describing the greatest pixel difference,
   1482      *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
   1483      *     {@link Pair<Integer, Integer>} of the (col, row) pixel coordinate
   1484      *     where it was first found.
   1485      */
   1486     @TargetApi(12)
   1487     public static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2) {
   1488         if ((bitmap1 == null || bitmap2 == null) && bitmap1 != bitmap2) {
   1489             return new Difference(Integer.MAX_VALUE);
   1490         }
   1491         if (bitmap1 == bitmap2 || bitmap1.sameAs(bitmap2)) {
   1492             return new Difference(0);
   1493         }
   1494         if (bitmap1.getHeight() != bitmap2.getHeight() || bitmap1.getWidth() != bitmap2.getWidth()) {
   1495             return new Difference(Integer.MAX_VALUE);
   1496         }
   1497         // Convert all pixels to CIE L*a*b* color space so we can do a direct color comparison using
   1498         // euclidean distance formula.
   1499         final double[][] pixels1 = convertRgbToCieLab(bitmap1);
   1500         final double[][] pixels2 = convertRgbToCieLab(bitmap2);
   1501         int greatestDifference = 0;
   1502         int greatestDifferenceIndex = -1;
   1503         for (int i = 0; i < pixels1.length; i++) {
   1504             final int difference = euclideanDistance(pixels1[i], pixels2[i]);
   1505             if (difference > greatestDifference) {
   1506                 greatestDifference = difference;
   1507                 greatestDifferenceIndex = i;
   1508             }
   1509         }
   1510         return new Difference(greatestDifference, Pair.create(
   1511             greatestDifferenceIndex % bitmap1.getWidth(),
   1512             greatestDifferenceIndex / bitmap1.getHeight()));
   1513     }
   1514 
   1515     private static double[][] convertRgbToCieLab(Bitmap bitmap) {
   1516         final double[][] result = new double[bitmap.getHeight() * bitmap.getWidth()][3];
   1517         final int pixels[] = new int[bitmap.getHeight() * bitmap.getWidth()];
   1518         bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
   1519         for (int i = 0; i < pixels.length; i++) {
   1520             final double[] transformedColor = pixelTransformCache.get(pixels[i]);
   1521             if (transformedColor != null) {
   1522                 result[i] = transformedColor;
   1523             } else {
   1524                 result[i] = convertXyzToCieLab(convertRgbToXyz(pixels[i]));
   1525                 pixelTransformCache.put(pixels[i], result[i]);
   1526             }
   1527         }
   1528         return result;
   1529     }
   1530 
   1531     /**
   1532      * Conversion from RGB to XYZ based algorithm as defined by:
   1533      * http://www.easyrgb.com/index.php?X=MATH&H=02#text2
   1534      *
   1535      * <p><pre>{@code
   1536      *   var_R = ( R / 255 )        //R from 0 to 255
   1537      *   var_G = ( G / 255 )        //G from 0 to 255
   1538      *   var_B = ( B / 255 )        //B from 0 to 255
   1539      *
   1540      *   if ( var_R > 0.04045 ) var_R = ( ( var_R + 0.055 ) / 1.055 ) ^ 2.4
   1541      *   else                   var_R = var_R / 12.92
   1542      *   if ( var_G > 0.04045 ) var_G = ( ( var_G + 0.055 ) / 1.055 ) ^ 2.4
   1543      *   else                   var_G = var_G / 12.92
   1544      *   if ( var_B > 0.04045 ) var_B = ( ( var_B + 0.055 ) / 1.055 ) ^ 2.4
   1545      *   else                   var_B = var_B / 12.92
   1546      *
   1547      *   var_R = var_R * 100
   1548      *   var_G = var_G * 100
   1549      *   var_B = var_B * 100
   1550      *
   1551      *   // Observer. = 2, Illuminant = D65
   1552      *   X = var_R * 0.4124 + var_G * 0.3576 + var_B * 0.1805
   1553      *   Y = var_R * 0.2126 + var_G * 0.7152 + var_B * 0.0722
   1554      *   Z = var_R * 0.0193 + var_G * 0.1192 + var_B * 0.9505
   1555      * }</pre>
   1556      *
   1557      * @param rgbColor A packed int made up of 4 bytes: alpha, red, green, blue.
   1558      * @return An array of doubles where each value is a component of the XYZ color space.
   1559      */
   1560     private static double[] convertRgbToXyz(int rgbColor) {
   1561         final double[] comp = {Color.red(rgbColor), Color.green(rgbColor), Color.blue(rgbColor)};
   1562 
   1563         for (int i = 0; i < comp.length; i++) {
   1564             comp[i] /= 255.0;
   1565             if (comp[i] > 0.04045) {
   1566                 comp[i] = Math.pow((comp[i] + 0.055) / 1.055, 2.4);
   1567             } else {
   1568                 comp[i] /= 12.92;
   1569             }
   1570             comp[i] *= 100;
   1571         }
   1572         final double x = (comp[RED] * 0.4124) + (comp[GREEN] * 0.3576) + (comp[BLUE] * 0.1805);
   1573         final double y = (comp[RED] * 0.2126) + (comp[GREEN] * 0.7152) + (comp[BLUE] * 0.0722);
   1574         final double z = (comp[RED] * 0.0193) + (comp[GREEN] * 0.1192) + (comp[BLUE] * 0.9505);
   1575         return new double[] {x, y, z};
   1576     }
   1577 
   1578     /**
   1579      * Conversion from XYZ to CIE-L*a*b* based algorithm as defined by:
   1580      * http://www.easyrgb.com/index.php?X=MATH&H=07#text7
   1581      *
   1582      * <p><pre>
   1583      * {@code
   1584      *   var_X = X / ref_X          //ref_X =  95.047   Observer= 2, Illuminant= D65
   1585      *   var_Y = Y / ref_Y          //ref_Y = 100.000
   1586      *   var_Z = Z / ref_Z          //ref_Z = 108.883
   1587      *
   1588      *   if ( var_X > 0.008856 ) var_X = var_X ^ ( 1/3 )
   1589      *   else                    var_X = ( 7.787 * var_X ) + ( 16 / 116 )
   1590      *   if ( var_Y > 0.008856 ) var_Y = var_Y ^ ( 1/3 )
   1591      *   else                    var_Y = ( 7.787 * var_Y ) + ( 16 / 116 )
   1592      *   if ( var_Z > 0.008856 ) var_Z = var_Z ^ ( 1/3 )
   1593      *   else                    var_Z = ( 7.787 * var_Z ) + ( 16 / 116 )
   1594      *
   1595      *   CIE-L* = ( 116 * var_Y ) - 16
   1596      *   CIE-a* = 500 * ( var_X - var_Y )
   1597      *   CIE-b* = 200 * ( var_Y - var_Z )
   1598      * }
   1599      * </pre>
   1600      *
   1601      * @param comp An array of doubles where each value is a component of the XYZ color space.
   1602      * @return An array of doubles where each value is a component of the CIE-L*a*b* color space.
   1603      */
   1604     private static double[] convertXyzToCieLab(double[] comp) {
   1605         comp[X] /= 95.047;
   1606         comp[Y] /= 100.0;
   1607         comp[Z] /= 108.883;
   1608 
   1609         for (int i = 0; i < comp.length; i++) {
   1610             if (comp[i] > 0.008856) {
   1611                 comp[i] = Math.pow(comp[i], (1.0 / 3.0));
   1612             } else {
   1613                 comp[i] = (7.787 * comp[i]) + (16.0 / 116.0);
   1614             }
   1615         }
   1616         final double l = (116 * comp[Y]) - 16;
   1617         final double a = 500 * (comp[X] - comp[Y]);
   1618         final double b = 200 * (comp[Y] - comp[Z]);
   1619         return new double[] {l, a, b};
   1620     }
   1621 
   1622     private static int euclideanDistance(double[] p1, double[] p2) {
   1623         if (p1.length != p2.length) {
   1624             return Integer.MAX_VALUE;
   1625         }
   1626         double result = 0;
   1627         for (int i = 0; i < p1.length; i++) {
   1628             result += Math.pow(p1[i] - p2[i], 2);
   1629         }
   1630         return (int) Math.round(Math.sqrt(result));
   1631     }
   1632 
   1633     /**
   1634      * Crops the border of the array representing an image by hBorderSize
   1635      * pixels on the left and right borders, and by vBorderSize pixels on the
   1636      * top and bottom borders (so the width is 2 * hBorderSize smaller and
   1637      * the height is 2 * vBorderSize smaller), then scales the image up to
   1638      * match the original size using bilinear interpolation.
   1639      */
   1640     private static Bitmap shrinkAndScaleBilinear(
   1641             Bitmap input, double hBorderSize, double vBorderSize) {
   1642 
   1643         int width = input.getWidth();
   1644         int height = input.getHeight();
   1645 
   1646         // Compute the proper step sizes
   1647         double xInc = ((double) width - 1 - hBorderSize * 2) / (double) (width - 1);
   1648         double yInc = ((double) height - 1 - vBorderSize * 2) / (double) (height - 1);
   1649 
   1650         // Read the input bitmap into RGB arrays.
   1651         int[] inputPixels = new int[width * height];
   1652         input.getPixels(inputPixels, 0, width, 0, 0, width, height);
   1653         int[][] inputRgb = new int[width * height][3];
   1654         for (int i = 0; i < width * height; ++i) {
   1655             inputRgb[i][0] = Color.red(inputPixels[i]);
   1656             inputRgb[i][1] = Color.green(inputPixels[i]);
   1657             inputRgb[i][2] = Color.blue(inputPixels[i]);
   1658         }
   1659         inputPixels = null;
   1660 
   1661         // Prepare the output buffer.
   1662         int[] outputPixels = new int[width * height];
   1663 
   1664         // Start the iteration. The first y coordinate is vBorderSize.
   1665         double y = vBorderSize;
   1666         for (int yIndex = 0; yIndex < height; ++yIndex) {
   1667             // The first x coordinate is hBorderSize.
   1668             double x = hBorderSize;
   1669             for (int xIndex = 0; xIndex < width; ++xIndex) {
   1670                 // Determine the square of interest.
   1671                 int left = (int)x;    // This is floor(x).
   1672                 int top = (int)y;     // This is floor(y).
   1673                 int right = left + 1;
   1674                 int bottom = top + 1;
   1675 
   1676                 // (u, v) is the fractional part of (x, y).
   1677                 double u = x - (double)left;
   1678                 double v = y - (double)top;
   1679 
   1680                 // Precompute necessary products to save time.
   1681                 double p00 = (1.0 - u) * (1.0 - v);
   1682                 double p01 = (1.0 - u) * v;
   1683                 double p10 = u * (1.0 - v);
   1684                 double p11 = u * v;
   1685 
   1686                 // Clamp the indices to prevent out-of-bound that may be caused
   1687                 // by round-off error.
   1688                 if (left >= width) left = width - 1;
   1689                 if (top >= height) top = height - 1;
   1690                 if (right >= width) right = width - 1;
   1691                 if (bottom >= height) bottom = height - 1;
   1692 
   1693                 // Sample RGB values from the four corners.
   1694                 int[] rgb00 = inputRgb[top * width + left];
   1695                 int[] rgb01 = inputRgb[bottom * width + left];
   1696                 int[] rgb10 = inputRgb[top * width + right];
   1697                 int[] rgb11 = inputRgb[bottom * width + right];
   1698 
   1699                 // Interpolate each component of RGB separately.
   1700                 int[] mixedColor = new int[3];
   1701                 for (int k = 0; k < 3; ++k) {
   1702                     mixedColor[k] = (int)Math.round(
   1703                             p00 * (double) rgb00[k] + p01 * (double) rgb01[k]
   1704                             + p10 * (double) rgb10[k] + p11 * (double) rgb11[k]);
   1705                 }
   1706                 // Convert RGB to bitmap Color format and store.
   1707                 outputPixels[yIndex * width + xIndex] = Color.rgb(
   1708                         mixedColor[0], mixedColor[1], mixedColor[2]);
   1709                 x += xInc;
   1710             }
   1711             y += yInc;
   1712         }
   1713         // Assemble the output buffer into a Bitmap object.
   1714         return Bitmap.createBitmap(outputPixels, width, height, input.getConfig());
   1715     }
   1716 
   1717     /**
   1718      * Calls computeDifference on multiple cropped-and-scaled versions of
   1719      * bitmap2.
   1720      */
   1721     @TargetApi(12)
   1722     public static Difference computeMinimumDifference(
   1723             Bitmap bitmap1, Bitmap bitmap2, Pair<Double, Double>[] borderCrops) {
   1724 
   1725         // Compute the difference with the original image (bitmap2) first.
   1726         Difference minDiff = computeDifference(bitmap1, bitmap2);
   1727         // Then go through the list of borderCrops.
   1728         for (Pair<Double, Double> borderCrop : borderCrops) {
   1729             // Compute the difference between bitmap1 and a transformed
   1730             // version of bitmap2.
   1731             Bitmap bitmap2s = shrinkAndScaleBilinear(bitmap2, borderCrop.first, borderCrop.second);
   1732             Difference d = computeDifference(bitmap1, bitmap2s);
   1733             // Keep the minimum difference.
   1734             if (d.greatestPixelDifference < minDiff.greatestPixelDifference) {
   1735                 minDiff = d;
   1736                 minDiff.bestMatchBorderCrop = borderCrop;
   1737             }
   1738         }
   1739         return minDiff;
   1740     }
   1741 
   1742     /**
   1743      * Calls computeMinimumDifference on a default list of borderCrop.
   1744      */
   1745     @TargetApi(12)
   1746     public static Difference computeMinimumDifference(
   1747             Bitmap bitmap1, Bitmap bitmap2, int trueWidth, int trueHeight) {
   1748 
   1749         double hBorder = (double) bitmap1.getWidth() / (double) trueWidth;
   1750         double vBorder = (double) bitmap1.getHeight() / (double) trueHeight;
   1751         double hBorderH = 0.5 * hBorder; // Half-texel horizontal border
   1752         double vBorderH = 0.5 * vBorder; // Half-texel vertical border
   1753         return computeMinimumDifference(
   1754                 bitmap1,
   1755                 bitmap2,
   1756                 new Pair[] {
   1757                     Pair.create(hBorderH, 0.0),
   1758                     Pair.create(hBorderH, vBorderH),
   1759                     Pair.create(0.0, vBorderH),
   1760                     Pair.create(hBorder, 0.0),
   1761                     Pair.create(hBorder, vBorder),
   1762                     Pair.create(0.0, vBorder)
   1763                 });
   1764         // This default list of borderCrop comes from the behavior of
   1765         // GLConsumer.computeTransformMatrix().
   1766     }
   1767 
   1768     /* Describes the difference between two {@link Bitmap} instances. */
   1769     public static final class Difference {
   1770 
   1771         public final int greatestPixelDifference;
   1772         public final Pair<Integer, Integer> greatestPixelDifferenceCoordinates;
   1773         public Pair<Double, Double> bestMatchBorderCrop;
   1774 
   1775         private Difference(int greatestPixelDifference) {
   1776             this(greatestPixelDifference, null, Pair.create(0.0, 0.0));
   1777         }
   1778 
   1779         private Difference(
   1780                 int greatestPixelDifference,
   1781                 Pair<Integer, Integer> greatestPixelDifferenceCoordinates) {
   1782             this(greatestPixelDifference, greatestPixelDifferenceCoordinates,
   1783                     Pair.create(0.0, 0.0));
   1784         }
   1785 
   1786         private Difference(
   1787                 int greatestPixelDifference,
   1788                 Pair<Integer, Integer> greatestPixelDifferenceCoordinates,
   1789                 Pair<Double, Double> bestMatchBorderCrop) {
   1790             this.greatestPixelDifference = greatestPixelDifference;
   1791             this.greatestPixelDifferenceCoordinates = greatestPixelDifferenceCoordinates;
   1792             this.bestMatchBorderCrop = bestMatchBorderCrop;
   1793        }
   1794     }
   1795 
   1796 }
   1797 
   1798 /* Wrapper for MIME types. */
   1799 final class MimeTypes {
   1800 
   1801     private MimeTypes() {}
   1802 
   1803     public static final String VIDEO_VP9 = "video/x-vnd.on2.vp9";
   1804     public static final String VIDEO_H264 = "video/avc";
   1805 
   1806     public static boolean isVideo(String mimeType) {
   1807         return mimeType.startsWith("video");
   1808     }
   1809 
   1810 }
   1811