// Exported listing header: Home | History | Annotate | Download | only in cts
      1 /*
      2  * Copyright (C) 2013 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.media.cts;
     18 
     19 import android.content.Context;
     20 import android.content.res.Resources;
     21 import android.media.MediaCodec;
     22 import android.media.MediaCodec.CodecException;
     23 import android.media.MediaCodecInfo.CodecCapabilities;
     24 import android.media.MediaCodecList;
     25 import android.media.MediaCodecInfo;
     26 import android.media.MediaFormat;
     27 import android.os.Bundle;
     28 import android.os.Environment;
     29 import android.os.Looper;
     30 import android.os.Handler;
     31 import android.test.AndroidTestCase;
     32 import android.util.Log;
     33 import android.media.cts.R;
     34 
     35 import com.android.compatibility.common.util.MediaUtils;
     36 
     37 import java.io.File;
     38 import java.io.FileInputStream;
     39 import java.io.FileOutputStream;
     40 import java.io.InputStream;
     41 import java.nio.ByteBuffer;
     42 import java.util.Locale;
     43 import java.util.ArrayList;
     44 import java.util.concurrent.Callable;
     45 import java.util.concurrent.CountDownLatch;
     46 
     47 /**
     48  * Verification test for vpx encoder and decoder.
     49  *
     50  * A raw yv12 stream is encoded at various settings and written to an IVF
     51  * file. Encoded stream bitrate and key frame interval are checked against target values.
     52  * The stream is later decoded by the decoder to verify frames are decodable and to
     53  * calculate PSNR values for various bitrates.
     54  */
     55 public class VpxCodecTestBase extends AndroidTestCase {
     56 
    protected static final String TAG = "VPxCodecTestBase";
    protected static final String VP8_MIME = MediaFormat.MIMETYPE_VIDEO_VP8;
    protected static final String VP9_MIME = MediaFormat.MIMETYPE_VIDEO_VP9;
    // Root of external storage - input YUV clips and output IVF/YUV files live here.
    protected static final String SDCARD_DIR =
            Environment.getExternalStorageDirectory().getAbsolutePath();

    // Default timeout for MediaCodec buffer dequeue - 200 ms.
    protected static final long DEFAULT_DEQUEUE_TIMEOUT_US = 200000;
    // Default timeout for MediaEncoderAsync - 30 sec.
    protected static final long DEFAULT_ENCODE_TIMEOUT_MS = 30000;
    // Default sync frame interval in frames
    private static final int SYNC_FRAME_INTERVAL = 30;
    // Video bitrate type - should be set to OMX_Video_ControlRateConstant from OMX_Video.h
    protected static final int VIDEO_ControlRateVariable = 1;
    protected static final int VIDEO_ControlRateConstant = 2;
    // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
    // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
    private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
    // Allowable color formats supported by codec - in order of preference.
    private static final int[] mSupportedColorList = {
            CodecCapabilities.COLOR_FormatYUV420Planar,
            CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
            CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
            COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
    };
    // Scaled image cache list - contains scale factors, for which up-scaled frames
    // were calculated and were written to yuv file.
    ArrayList<Integer> mScaledImages = new ArrayList<Integer>();

    // Resources handle captured in setContext(); used to open raw input clips.
    private Resources mResources;
     87 
    /**
     * Stores the test {@link Context} and caches its {@link Resources}
     * handle so raw input clips can be opened later.
     */
    @Override
    public void setContext(Context context) {
        super.setContext(context);
        mResources = mContext.getResources();
    }
     93 
     94     /**
     95      *  VPx codec properties generated by getVpxCodecProperties() function.
     96      */
     97     private class CodecProperties {
     98         CodecProperties(String codecName, int colorFormat) {
     99             this.codecName = codecName;
    100             this.colorFormat = colorFormat;
    101         }
    102         public final String codecName; // OpenMax component name for VPx codec.
    103         public final int colorFormat;  // Color format supported by codec.
    104     }
    105 
    106     /**
    107      * Function to find VPx codec.
    108      *
    109      * Iterates through the list of available codecs and tries to find
    110      * VPX codec, which can support either YUV420 planar or NV12 color formats.
    111      * If forceGoogleCodec parameter set to true the function always returns
    112      * Google VPX codec.
    113      * If forceGoogleCodec parameter set to false the functions looks for platform
    114      * specific VPX codec first. If no platform specific codec exist, falls back to
    115      * Google VPX codec.
    116      *
    117      * @param isEncoder     Flag if encoder is requested.
    118      * @param forceGoogleCodec  Forces to use Google codec.
    119      */
    120     private CodecProperties getVpxCodecProperties(
    121             boolean isEncoder,
    122             MediaFormat format,
    123             boolean forceGoogleCodec) throws Exception {
    124         CodecProperties codecProperties = null;
    125         String mime = format.getString(MediaFormat.KEY_MIME);
    126 
    127         // Loop through the list of codec components in case platform specific codec
    128         // is requested.
    129         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    130         for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
    131             if (isEncoder != codecInfo.isEncoder()) {
    132                 continue;
    133             }
    134             Log.v(TAG, codecInfo.getName());
    135             // TODO: remove dependence of Google from the test
    136             // Check if this is Google codec - we should ignore it.
    137             if (codecInfo.isVendor() && forceGoogleCodec) {
    138                 continue;
    139             }
    140 
    141             for (String type : codecInfo.getSupportedTypes()) {
    142                 if (!type.equalsIgnoreCase(mime)) {
    143                     continue;
    144                 }
    145                 CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(type);
    146                 if (!capabilities.isFormatSupported(format)) {
    147                     continue;
    148                 }
    149 
    150                 // Get candidate codec properties.
    151                 Log.v(TAG, "Found candidate codec " + codecInfo.getName());
    152                 for (int colorFormat: capabilities.colorFormats) {
    153                     Log.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
    154                 }
    155 
    156                 // Check supported color formats.
    157                 for (int supportedColorFormat : mSupportedColorList) {
    158                     for (int codecColorFormat : capabilities.colorFormats) {
    159                         if (codecColorFormat == supportedColorFormat) {
    160                             codecProperties = new CodecProperties(codecInfo.getName(),
    161                                     codecColorFormat);
    162                             Log.v(TAG, "Found target codec " + codecProperties.codecName +
    163                                     ". Color: 0x" + Integer.toHexString(codecColorFormat));
    164                             // return first vendor codec (hopefully HW) found
    165                             if (!codecInfo.isVendor()) {
    166                                 return codecProperties;
    167                             }
    168                         }
    169                     }
    170                 }
    171             }
    172         }
    173         if (codecProperties == null) {
    174             Log.i(TAG, "no suitable " + (forceGoogleCodec ? "google " : "")
    175                     + (isEncoder ? "encoder " : "decoder ") + "found for " + format);
    176         }
    177         return codecProperties;
    178     }
    179 
    180     /**
    181      * Parameters for encoded video stream.
    182      */
    183     protected class EncoderOutputStreamParameters {
    184         // Name of raw YUV420 input file. When the value of this parameter
    185         // is set to null input file descriptor from inputResourceId parameter
    186         // is used instead.
    187         public String inputYuvFilename;
    188         // Name of scaled YUV420 input file.
    189         public String scaledYuvFilename;
    190         // File descriptor for the raw input file (YUV420). Used only if
    191         // inputYuvFilename parameter is null.
    192         int inputResourceId;
    193         // Name of the IVF file to write encoded bitsream
    194         public String outputIvfFilename;
    195         // Mime Type of the Encoded content.
    196         public String codecMimeType;
    197         // Force to use Google VPx encoder.
    198         boolean forceGoogleEncoder;
    199         // Number of frames to encode.
    200         int frameCount;
    201         // Frame rate of input file in frames per second.
    202         int frameRate;
    203         // Encoded frame width.
    204         public int frameWidth;
    205         // Encoded frame height.
    206         public int frameHeight;
    207         // Encoding bitrate array in bits/second for every frame. If array length
    208         // is shorter than the total number of frames, the last value is re-used for
    209         // all remaining frames. For constant bitrate encoding single element
    210         // array can be used with first element set to target bitrate value.
    211         public int[] bitrateSet;
    212         // Encoding bitrate type - VBR or CBR
    213         public int bitrateType;
    214         // Number of temporal layers
    215         public int temporalLayers;
    216         // Desired key frame interval - codec is asked to generate key frames
    217         // at a period defined by this parameter.
    218         public int syncFrameInterval;
    219         // Optional parameter - forced key frame interval. Used to
    220         // explicitly request the codec to generate key frames using
    221         // MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME parameter.
    222         public int syncForceFrameInterval;
    223         // Buffer timeout
    224         long timeoutDequeue;
    225         // Flag if encoder should run in Looper thread.
    226         boolean runInLooperThread;
    227         // Flag if use NdkMediaCodec
    228         boolean useNdk;
    229     }
    230 
    231     /**
    232      * Generates an array of default parameters for encoder output stream based on
    233      * upscaling value.
    234      */
    235     protected ArrayList<EncoderOutputStreamParameters> getDefaultEncodingParameterList(
    236             String inputYuvName,
    237             String outputIvfBaseName,
    238             String codecMimeType,
    239             int encodeSeconds,
    240             int[] resolutionScales,
    241             int frameWidth,
    242             int frameHeight,
    243             int frameRate,
    244             int bitrateMode,
    245             int[] bitrates,
    246             boolean syncEncoding) {
    247         assertTrue(resolutionScales.length == bitrates.length);
    248         int numCodecs = resolutionScales.length;
    249         ArrayList<EncoderOutputStreamParameters> outputParameters =
    250                 new ArrayList<EncoderOutputStreamParameters>(numCodecs);
    251         for (int i = 0; i < numCodecs; i++) {
    252             EncoderOutputStreamParameters params = new EncoderOutputStreamParameters();
    253             if (inputYuvName != null) {
    254                 params.inputYuvFilename = SDCARD_DIR + File.separator + inputYuvName;
    255             } else {
    256                 params.inputYuvFilename = null;
    257             }
    258             params.scaledYuvFilename = SDCARD_DIR + File.separator +
    259                     outputIvfBaseName + resolutionScales[i]+ ".yuv";
    260             params.inputResourceId = R.raw.football_qvga;
    261             params.codecMimeType = codecMimeType;
    262             String codecSuffix = VP8_MIME.equals(codecMimeType) ? "vp8" : "vp9";
    263             params.outputIvfFilename = SDCARD_DIR + File.separator +
    264                     outputIvfBaseName + resolutionScales[i] + "_" + codecSuffix + ".ivf";
    265             params.forceGoogleEncoder = false;
    266             params.frameCount = encodeSeconds * frameRate;
    267             params.frameRate = frameRate;
    268             params.frameWidth = Math.min(frameWidth * resolutionScales[i], 1280);
    269             params.frameHeight = Math.min(frameHeight * resolutionScales[i], 720);
    270             params.bitrateSet = new int[1];
    271             params.bitrateSet[0] = bitrates[i];
    272             params.bitrateType = bitrateMode;
    273             params.temporalLayers = 0;
    274             params.syncFrameInterval = SYNC_FRAME_INTERVAL;
    275             params.syncForceFrameInterval = 0;
    276             if (syncEncoding) {
    277                 params.timeoutDequeue = DEFAULT_DEQUEUE_TIMEOUT_US;
    278                 params.runInLooperThread = false;
    279             } else {
    280                 params.timeoutDequeue = 0;
    281                 params.runInLooperThread = true;
    282             }
    283             outputParameters.add(params);
    284         }
    285         return outputParameters;
    286     }
    287 
    288     protected EncoderOutputStreamParameters getDefaultEncodingParameters(
    289             String inputYuvName,
    290             String outputIvfBaseName,
    291             String codecMimeType,
    292             int encodeSeconds,
    293             int frameWidth,
    294             int frameHeight,
    295             int frameRate,
    296             int bitrateMode,
    297             int bitrate,
    298             boolean syncEncoding) {
    299         int[] scaleValues = { 1 };
    300         int[] bitrates = { bitrate };
    301         return getDefaultEncodingParameterList(
    302                 inputYuvName,
    303                 outputIvfBaseName,
    304                 codecMimeType,
    305                 encodeSeconds,
    306                 scaleValues,
    307                 frameWidth,
    308                 frameHeight,
    309                 frameRate,
    310                 bitrateMode,
    311                 bitrates,
    312                 syncEncoding).get(0);
    313     }
    314 
    315     /**
    316      * Converts (interleaves) YUV420 planar to NV12.
    317      * Assumes packed, macroblock-aligned frame with no cropping
    318      * (visible/coded row length == stride).
    319      */
    320     private static byte[] YUV420ToNV(int width, int height, byte[] yuv) {
    321         byte[] nv = new byte[yuv.length];
    322         // Y plane we just copy.
    323         System.arraycopy(yuv, 0, nv, 0, width * height);
    324 
    325         // U & V plane we interleave.
    326         int u_offset = width * height;
    327         int v_offset = u_offset + u_offset / 4;
    328         int nv_offset = width * height;
    329         for (int i = 0; i < width * height / 4; i++) {
    330             nv[nv_offset++] = yuv[u_offset++];
    331             nv[nv_offset++] = yuv[v_offset++];
    332         }
    333         return nv;
    334     }
    335 
    336     /**
    337      * Converts (de-interleaves) NV12 to YUV420 planar.
    338      * Stride may be greater than width, slice height may be greater than height.
    339      */
    340     private static byte[] NV12ToYUV420(int width, int height,
    341             int stride, int sliceHeight, byte[] nv12) {
    342         byte[] yuv = new byte[width * height * 3 / 2];
    343 
    344         // Y plane we just copy.
    345         for (int i = 0; i < height; i++) {
    346             System.arraycopy(nv12, i * stride, yuv, i * width, width);
    347         }
    348 
    349         // U & V plane - de-interleave.
    350         int u_offset = width * height;
    351         int v_offset = u_offset + u_offset / 4;
    352         int nv_offset;
    353         for (int i = 0; i < height / 2; i++) {
    354             nv_offset = stride * (sliceHeight + i);
    355             for (int j = 0; j < width / 2; j++) {
    356                 yuv[u_offset++] = nv12[nv_offset++];
    357                 yuv[v_offset++] = nv12[nv_offset++];
    358             }
    359         }
    360         return yuv;
    361     }
    362 
    363     /**
    364      * Packs YUV420 frame by moving it to a smaller size buffer with stride and slice
    365      * height equal to the crop window.
    366      */
    367     private static byte[] PackYUV420(int left, int top, int width, int height,
    368             int stride, int sliceHeight, byte[] src) {
    369         byte[] dst = new byte[width * height * 3 / 2];
    370         // Y copy.
    371         for (int i = 0; i < height; i++) {
    372             System.arraycopy(src, (i + top) * stride + left, dst, i * width, width);
    373         }
    374         // U and V copy.
    375         int u_src_offset = stride * sliceHeight;
    376         int v_src_offset = u_src_offset + u_src_offset / 4;
    377         int u_dst_offset = width * height;
    378         int v_dst_offset = u_dst_offset + u_dst_offset / 4;
    379         // Downsample and align to floor-2 for crop origin.
    380         left /= 2;
    381         top /= 2;
    382         for (int i = 0; i < height / 2; i++) {
    383             System.arraycopy(src, u_src_offset + (i + top) * (stride / 2) + left,
    384                     dst, u_dst_offset + i * (width / 2), width / 2);
    385             System.arraycopy(src, v_src_offset + (i + top) * (stride / 2) + left,
    386                     dst, v_dst_offset + i * (width / 2), width / 2);
    387         }
    388         return dst;
    389     }
    390 
    391 
    /**
     * Up-scales one plane 1:2 in both directions using 2x2 averaging.
     *
     * For each source pixel, four destination pixels are produced: the pixel
     * itself, the rounded average with its right neighbor, with its bottom
     * neighbor, and with all three neighbors. The last destination column and
     * the last two destination rows have no right/bottom source neighbors and
     * are filled by replication, handled by the tail code below.
     *
     * @param src           source plane buffer
     * @param srcByteOffset start of the plane inside src
     * @param srcStride     distance in bytes between source rows
     * @param dst           destination buffer (packed, stride == dstWidth)
     * @param dstByteOffset start of the plane inside dst
     * @param dstWidth      destination plane width (2x source width)
     * @param dstHeight     destination plane height (2x the rows consumed)
     */
    private static void imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride,
            byte[] dst, int dstByteOffset, int dstWidth, int dstHeight) {
        // Each source row i produces destination rows 2*i and 2*i+1;
        // the final source row is handled separately after the loop.
        for (int i = 0; i < dstHeight/2 - 1; i++) {
            int dstOffset0 = 2 * i * dstWidth + dstByteOffset;
            int dstOffset1 = dstOffset0 + dstWidth;
            int srcOffset0 = i * srcStride + srcByteOffset;
            int srcOffset1 = srcOffset0 + srcStride;
            // pixelXY: X = row (0 = current, 1 = next), Y = column (0 = current, 1 = next).
            // The masks widen the signed bytes to unsigned 0..255 values.
            int pixel00 = (int)src[srcOffset0++] & 0xff;
            int pixel10 = (int)src[srcOffset1++] & 0xff;
            for (int j = 0; j < dstWidth/2 - 1; j++) {
                int pixel01 = (int)src[srcOffset0++] & 0xff;
                int pixel11 = (int)src[srcOffset1++] & 0xff;
                dst[dstOffset0++] = (byte)pixel00;
                // "+1" / "+2" implement round-to-nearest for the 2- and 4-pixel averages.
                dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
                dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
                dst[dstOffset1++] = (byte)((pixel00 + pixel01 + pixel10 + pixel11 + 2) / 4);
                // Slide the 2x2 window one source column to the right.
                pixel00 = pixel01;
                pixel10 = pixel11;
            }
            // last column - no right neighbor, replicate horizontally.
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
            dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
        }

        // last row - no bottom neighbor, so both destination rows replicate
        // the final source row (only horizontal averaging remains).
        int dstOffset0 = (dstHeight - 2) * dstWidth + dstByteOffset;
        int dstOffset1 = dstOffset0 + dstWidth;
        int srcOffset0 = (dstHeight/2 - 1) * srcStride + srcByteOffset;
        int pixel00 = (int)src[srcOffset0++] & 0xff;
        for (int j = 0; j < dstWidth/2 - 1; j++) {
            int pixel01 = (int)src[srcOffset0++] & 0xff;
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
            dst[dstOffset1++] = (byte)pixel00;
            dst[dstOffset1++] = (byte)((pixel00 + pixel01 + 1) / 2);
            pixel00 = pixel01;
        }
        // the very last pixel - bottom right
        dst[dstOffset0++] = (byte)pixel00;
        dst[dstOffset0++] = (byte)pixel00;
        dst[dstOffset1++] = (byte)pixel00;
        dst[dstOffset1++] = (byte)pixel00;
    }
    437 
    438     /**
    439     * Up-scale image.
    440     * Scale factor is defined by source and destination width ratio.
    441     * Only 1:2 and 1:4 up-scaling is supported for now.
    442     * For 640x480 -> 1280x720 conversion only top 640x360 part of the original
    443     * image is scaled.
    444     */
    445     private static byte[] imageScale(byte[] src, int srcWidth, int srcHeight,
    446             int dstWidth, int dstHeight) throws Exception {
    447         int srcYSize = srcWidth * srcHeight;
    448         int dstYSize = dstWidth * dstHeight;
    449         byte[] dst = null;
    450         if (dstWidth == 2 * srcWidth && dstHeight <= 2 * srcHeight) {
    451             // 1:2 upscale
    452             dst = new byte[dstWidth * dstHeight * 3 / 2];
    453             imageUpscale1To2(src, 0, srcWidth,
    454                     dst, 0, dstWidth, dstHeight);                                 // Y
    455             imageUpscale1To2(src, srcYSize, srcWidth / 2,
    456                     dst, dstYSize, dstWidth / 2, dstHeight / 2);                  // U
    457             imageUpscale1To2(src, srcYSize * 5 / 4, srcWidth / 2,
    458                     dst, dstYSize * 5 / 4, dstWidth / 2, dstHeight / 2);          // V
    459         } else if (dstWidth == 4 * srcWidth && dstHeight <= 4 * srcHeight) {
    460             // 1:4 upscale - in two steps
    461             int midWidth = 2 * srcWidth;
    462             int midHeight = 2 * srcHeight;
    463             byte[] midBuffer = imageScale(src, srcWidth, srcHeight, midWidth, midHeight);
    464             dst = imageScale(midBuffer, midWidth, midHeight, dstWidth, dstHeight);
    465 
    466         } else {
    467             throw new RuntimeException("Can not find proper scaling function");
    468         }
    469 
    470         return dst;
    471     }
    472 
    473     private void cacheScaledImage(
    474             String srcYuvFilename, int srcResourceId, int srcFrameWidth, int srcFrameHeight,
    475             String dstYuvFilename, int dstFrameWidth, int dstFrameHeight) throws Exception {
    476         InputStream srcStream = OpenFileOrResourceId(srcYuvFilename, srcResourceId);
    477         FileOutputStream dstFile = new FileOutputStream(dstYuvFilename, false);
    478         int srcFrameSize = srcFrameWidth * srcFrameHeight * 3 / 2;
    479         byte[] srcFrame = new byte[srcFrameSize];
    480         byte[] dstFrame = null;
    481         Log.d(TAG, "Scale to " + dstFrameWidth + " x " + dstFrameHeight + ". -> " + dstYuvFilename);
    482         while (true) {
    483             int bytesRead = srcStream.read(srcFrame);
    484             if (bytesRead != srcFrame.length) {
    485                 break;
    486             }
    487             if (dstFrameWidth == srcFrameWidth && dstFrameHeight == srcFrameHeight) {
    488                 dstFrame = srcFrame;
    489             } else {
    490                 dstFrame = imageScale(srcFrame, srcFrameWidth, srcFrameHeight,
    491                         dstFrameWidth, dstFrameHeight);
    492             }
    493             dstFile.write(dstFrame);
    494         }
    495         srcStream.close();
    496         dstFile.close();
    497     }
    498 
    499 
    500     /**
    501      * A basic check if an encoded stream is decodable.
    502      *
    503      * The most basic confirmation we can get about a frame
    504      * being properly encoded is trying to decode it.
    505      * (Especially in realtime mode encode output is non-
    506      * deterministic, therefore a more thorough check like
    507      * md5 sum comparison wouldn't work.)
    508      *
    509      * Indeed, MediaCodec will raise an IllegalStateException
    510      * whenever vpx decoder fails to decode a frame, and
    511      * this test uses that fact to verify the bitstream.
    512      *
    513      * @param inputIvfFilename  The name of the IVF file containing encoded bitsream.
    514      * @param outputYuvFilename The name of the output YUV file (optional).
    515      * @param frameRate         Frame rate of input file in frames per second
    516      * @param forceGoogleDecoder    Force to use Google VPx decoder.
    517      */
    518     protected ArrayList<MediaCodec.BufferInfo> decode(
    519             String inputIvfFilename,
    520             String outputYuvFilename,
    521             String codecMimeType,
    522             int frameRate,
    523             boolean forceGoogleDecoder) throws Exception {
    524         ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
    525 
    526         // Open input/output.
    527         IvfReader ivf = new IvfReader(inputIvfFilename);
    528         int frameWidth = ivf.getWidth();
    529         int frameHeight = ivf.getHeight();
    530         int frameCount = ivf.getFrameCount();
    531         int frameStride = frameWidth;
    532         int frameSliceHeight = frameHeight;
    533         int cropLeft = 0;
    534         int cropTop = 0;
    535         int cropWidth = frameWidth;
    536         int cropHeight = frameHeight;
    537         assertTrue(frameWidth > 0);
    538         assertTrue(frameHeight > 0);
    539         assertTrue(frameCount > 0);
    540 
    541         // Create decoder.
    542         MediaFormat format = MediaFormat.createVideoFormat(
    543                 codecMimeType, ivf.getWidth(), ivf.getHeight());
    544         CodecProperties properties = getVpxCodecProperties(
    545                 false /* encoder */, format, forceGoogleDecoder);
    546         if (properties == null) {
    547             ivf.close();
    548             return null;
    549         }
    550         int frameColorFormat = properties.colorFormat;
    551         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
    552 
    553         FileOutputStream yuv = null;
    554         if (outputYuvFilename != null) {
    555             yuv = new FileOutputStream(outputYuvFilename, false);
    556         }
    557 
    558         Log.d(TAG, "Creating decoder " + properties.codecName +
    559                 ". Color format: 0x" + Integer.toHexString(frameColorFormat) +
    560                 ". " + frameWidth + " x " + frameHeight);
    561         Log.d(TAG, "  Format: " + format);
    562         Log.d(TAG, "  In: " + inputIvfFilename + ". Out:" + outputYuvFilename);
    563         MediaCodec decoder = MediaCodec.createByCodecName(properties.codecName);
    564         decoder.configure(format,
    565                           null,  // surface
    566                           null,  // crypto
    567                           0);    // flags
    568         decoder.start();
    569 
    570         ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    571         ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
    572         MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    573 
    574         // decode loop
    575         int inputFrameIndex = 0;
    576         int outputFrameIndex = 0;
    577         long inPresentationTimeUs = 0;
    578         long outPresentationTimeUs = 0;
    579         boolean sawOutputEOS = false;
    580         boolean sawInputEOS = false;
    581 
    582         while (!sawOutputEOS) {
    583             if (!sawInputEOS) {
    584                 int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_DEQUEUE_TIMEOUT_US);
    585                 if (inputBufIndex >= 0) {
    586                     byte[] frame = ivf.readFrame(inputFrameIndex);
    587 
    588                     if (inputFrameIndex == frameCount - 1) {
    589                         Log.d(TAG, "  Input EOS for frame # " + inputFrameIndex);
    590                         sawInputEOS = true;
    591                     }
    592 
    593                     inputBuffers[inputBufIndex].clear();
    594                     inputBuffers[inputBufIndex].put(frame);
    595                     inputBuffers[inputBufIndex].rewind();
    596                     inPresentationTimeUs = (inputFrameIndex * 1000000) / frameRate;
    597 
    598                     decoder.queueInputBuffer(
    599                             inputBufIndex,
    600                             0,  // offset
    601                             frame.length,
    602                             inPresentationTimeUs,
    603                             sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
    604 
    605                     inputFrameIndex++;
    606                 }
    607             }
    608 
    609             int result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
    610             while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
    611                     result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    612                 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    613                     outputBuffers = decoder.getOutputBuffers();
    614                 } else  if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    615                     // Process format change
    616                     format = decoder.getOutputFormat();
    617                     frameWidth = format.getInteger(MediaFormat.KEY_WIDTH);
    618                     frameHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
    619                     frameColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
    620                     Log.d(TAG, "Decoder output format change. Color: 0x" +
    621                             Integer.toHexString(frameColorFormat));
    622                     Log.d(TAG, "Format: " + format.toString());
    623 
    624                     // Parse frame and slice height from undocumented values
    625                     if (format.containsKey("stride")) {
    626                         frameStride = format.getInteger("stride");
    627                     } else {
    628                         frameStride = frameWidth;
    629                     }
    630                     if (format.containsKey("slice-height")) {
    631                         frameSliceHeight = format.getInteger("slice-height");
    632                     } else {
    633                         frameSliceHeight = frameHeight;
    634                     }
    635                     Log.d(TAG, "Frame stride and slice height: " + frameStride +
    636                             " x " + frameSliceHeight);
    637                     frameStride = Math.max(frameWidth, frameStride);
    638                     frameSliceHeight = Math.max(frameHeight, frameSliceHeight);
    639 
    640                     // Parse crop window for the area of recording decoded frame data.
    641                     if (format.containsKey("crop-left")) {
    642                         cropLeft = format.getInteger("crop-left");
    643                     }
    644                     if (format.containsKey("crop-top")) {
    645                         cropTop = format.getInteger("crop-top");
    646                     }
    647                     if (format.containsKey("crop-right")) {
    648                         cropWidth = format.getInteger("crop-right") - cropLeft + 1;
    649                     } else {
    650                         cropWidth = frameWidth;
    651                     }
    652                     if (format.containsKey("crop-bottom")) {
    653                         cropHeight = format.getInteger("crop-bottom") - cropTop + 1;
    654                     } else {
    655                         cropHeight = frameHeight;
    656                     }
    657                     Log.d(TAG, "Frame crop window origin: " + cropLeft + " x " + cropTop
    658                             + ", size: " + cropWidth + " x " + cropHeight);
    659                     cropWidth = Math.min(frameWidth - cropLeft, cropWidth);
    660                     cropHeight = Math.min(frameHeight - cropTop, cropHeight);
    661                 }
    662                 result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
    663             }
    664             if (result >= 0) {
    665                 int outputBufIndex = result;
    666                 outPresentationTimeUs = bufferInfo.presentationTimeUs;
    667                 Log.v(TAG, "Writing buffer # " + outputFrameIndex +
    668                         ". Size: " + bufferInfo.size +
    669                         ". InTime: " + (inPresentationTimeUs + 500)/1000 +
    670                         ". OutTime: " + (outPresentationTimeUs + 500)/1000);
    671                 if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    672                     sawOutputEOS = true;
    673                     Log.d(TAG, "   Output EOS for frame # " + outputFrameIndex);
    674                 }
    675 
    676                 if (bufferInfo.size > 0) {
    677                     // Save decoder output to yuv file.
    678                     if (yuv != null) {
    679                         byte[] frame = new byte[bufferInfo.size];
    680                         outputBuffers[outputBufIndex].position(bufferInfo.offset);
    681                         outputBuffers[outputBufIndex].get(frame, 0, bufferInfo.size);
    682                         // Convert NV12 to YUV420 if necessary.
    683                         if (frameColorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
    684                             frame = NV12ToYUV420(frameWidth, frameHeight,
    685                                     frameStride, frameSliceHeight, frame);
    686                         }
    687                         int writeLength = Math.min(cropWidth * cropHeight * 3 / 2, frame.length);
    688                         // Pack frame if necessary.
    689                         if (writeLength < frame.length &&
    690                                 (frameStride > cropWidth || frameSliceHeight > cropHeight)) {
    691                             frame = PackYUV420(cropLeft, cropTop, cropWidth, cropHeight,
    692                                     frameStride, frameSliceHeight, frame);
    693                         }
    694                         yuv.write(frame, 0, writeLength);
    695                     }
    696                     outputFrameIndex++;
    697 
    698                     // Update statistics - store presentation time delay in offset
    699                     long presentationTimeUsDelta = inPresentationTimeUs - outPresentationTimeUs;
    700                     MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
    701                     bufferInfoCopy.set((int)presentationTimeUsDelta, bufferInfo.size,
    702                             outPresentationTimeUs, bufferInfo.flags);
    703                     bufferInfos.add(bufferInfoCopy);
    704                 }
    705                 decoder.releaseOutputBuffer(outputBufIndex, false);
    706             }
    707         }
    708         decoder.stop();
    709         decoder.release();
    710         ivf.close();
    711         if (yuv != null) {
    712             yuv.close();
    713         }
    714 
    715         return bufferInfos;
    716     }
    717 
    718 
    719     /**
    720      * Helper function to return InputStream from either filename (if set)
    721      * or resource id (if filename is not set).
    722      */
    723     private InputStream OpenFileOrResourceId(String filename, int resourceId) throws Exception {
    724         if (filename != null) {
    725             return new FileInputStream(filename);
    726         }
    727         return mResources.openRawResource(resourceId);
    728     }
    729 
    /**
     * Results of frame encoding.
     * Holds one encoder output frame together with the bookkeeping values
     * the test harness records for it.
     */
    protected class MediaEncoderOutput {
        public long inPresentationTimeUs;   // presentation time of the matching input frame
        public long outPresentationTimeUs;  // presentation time reported by the encoder
        public boolean outputGenerated;     // true only when the encoder produced output
        public int flags;                   // MediaCodec.BufferInfo flags for this frame
        public byte[] buffer;               // encoded frame payload
    }
    740 
    741     protected class MediaEncoderAsyncHelper {
    742         private final EncoderOutputStreamParameters mStreamParams;
    743         private final CodecProperties mProperties;
    744         private final ArrayList<MediaCodec.BufferInfo> mBufferInfos;
    745         private final IvfWriter mIvf;
    746         private final byte[] mSrcFrame;
    747 
    748         private InputStream mYuvStream;
    749         private int mInputFrameIndex;
    750 
    751         MediaEncoderAsyncHelper(
    752                 EncoderOutputStreamParameters streamParams,
    753                 CodecProperties properties,
    754                 ArrayList<MediaCodec.BufferInfo> bufferInfos,
    755                 IvfWriter ivf)
    756                 throws Exception {
    757             mStreamParams = streamParams;
    758             mProperties = properties;
    759             mBufferInfos = bufferInfos;
    760             mIvf = ivf;
    761 
    762             int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
    763             mSrcFrame = new byte[srcFrameSize];
    764 
    765             mYuvStream = OpenFileOrResourceId(
    766                     streamParams.inputYuvFilename, streamParams.inputResourceId);
    767         }
    768 
    769         public byte[] getInputFrame() {
    770             // Check EOS
    771             if (mStreamParams.frameCount == 0
    772                     || (mStreamParams.frameCount > 0
    773                             && mInputFrameIndex >= mStreamParams.frameCount)) {
    774                 Log.d(TAG, "---Sending EOS empty frame for frame # " + mInputFrameIndex);
    775                 return null;
    776             }
    777 
    778             try {
    779                 int bytesRead = mYuvStream.read(mSrcFrame);
    780 
    781                 if (bytesRead == -1) {
    782                     // rewind to beginning of file
    783                     mYuvStream.close();
    784                     mYuvStream = OpenFileOrResourceId(
    785                             mStreamParams.inputYuvFilename, mStreamParams.inputResourceId);
    786                     bytesRead = mYuvStream.read(mSrcFrame);
    787                 }
    788             } catch (Exception e) {
    789                 Log.e(TAG, "Failed to read YUV file.");
    790                 return null;
    791             }
    792             mInputFrameIndex++;
    793 
    794             // Convert YUV420 to NV12 if necessary
    795             if (mProperties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
    796                 return YUV420ToNV(mStreamParams.frameWidth, mStreamParams.frameHeight,
    797                         mSrcFrame);
    798             } else {
    799                 return mSrcFrame;
    800             }
    801         }
    802 
    803         public boolean saveOutputFrame(MediaEncoderOutput out) {
    804             if (out.outputGenerated) {
    805                 if (out.buffer.length > 0) {
    806                     // Save frame
    807                     try {
    808                         mIvf.writeFrame(out.buffer, out.outPresentationTimeUs);
    809                     } catch (Exception e) {
    810                         Log.d(TAG, "Failed to write frame");
    811                         return true;
    812                     }
    813 
    814                     // Update statistics - store presentation time delay in offset
    815                     long presentationTimeUsDelta = out.inPresentationTimeUs -
    816                             out.outPresentationTimeUs;
    817                     MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
    818                     bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
    819                             out.outPresentationTimeUs, out.flags);
    820                     mBufferInfos.add(bufferInfoCopy);
    821                 }
    822                 // Detect output EOS
    823                 if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    824                     Log.d(TAG, "----Output EOS ");
    825                     return true;
    826                 }
    827             }
    828             return false;
    829         }
    830     }
    831 
    832     /**
    833      * Video encoder wrapper class.
    834      * Allows to run the encoder either in a callee's thread or in a looper thread
    835      * using buffer dequeue ready notification callbacks.
    836      *
    837      * Function feedInput() is used to send raw video frame to the encoder input. When encoder
    838      * is configured to run in async mode the function will run in a looper thread.
    839      * Encoded frame can be retrieved by calling getOutput() function.
    840      */
    841     protected class MediaEncoderAsync extends Thread {
    842         private int mId;
    843         private MediaCodecWrapper mCodec;
    844         private ByteBuffer[] mInputBuffers;
    845         private ByteBuffer[] mOutputBuffers;
    846         private int mInputFrameIndex;
    847         private int mOutputFrameIndex;
    848         private int mInputBufIndex;
    849         private int mFrameRate;
    850         private long mTimeout;
    851         private MediaCodec.BufferInfo mBufferInfo;
    852         private long mInPresentationTimeUs;
    853         private long mOutPresentationTimeUs;
    854         private boolean mAsync;
    855         // Flag indicating if input frame was consumed by the encoder in feedInput() call.
    856         private boolean mConsumedInput;
    857         // Result of frame encoding returned by getOutput() call.
    858         private MediaEncoderOutput mOutput;
    859         // Object used to signal that looper thread has started and Handler instance associated
    860         // with looper thread has been allocated.
    861         private final Object mThreadEvent = new Object();
    862         // Object used to signal that MediaCodec buffer dequeue notification callback
    863         // was received.
    864         private final Object mCallbackEvent = new Object();
    865         private Handler mHandler;
    866         private boolean mCallbackReceived;
    867         private MediaEncoderAsyncHelper mHelper;
    868         private final Object mCompletionEvent = new Object();
    869         private boolean mCompleted;
    870 
    871         private MediaCodec.Callback mCallback = new MediaCodec.Callback() {
    872             @Override
    873             public void onInputBufferAvailable(MediaCodec codec, int index) {
    874                 if (mHelper == null) {
    875                     Log.e(TAG, "async helper not available");
    876                     return;
    877                 }
    878 
    879                 byte[] encFrame = mHelper.getInputFrame();
    880                 boolean inputEOS = (encFrame == null);
    881 
    882                 int encFrameLength = 0;
    883                 int flags = 0;
    884                 if (inputEOS) {
    885                     flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
    886                 } else {
    887                     encFrameLength = encFrame.length;
    888 
    889                     ByteBuffer byteBuffer = mCodec.getInputBuffer(index);
    890                     byteBuffer.put(encFrame);
    891                     byteBuffer.rewind();
    892 
    893                     mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
    894 
    895                     Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
    896                             ". InTime: " + (mInPresentationTimeUs + 500)/1000);
    897 
    898                     mInputFrameIndex++;
    899                 }
    900 
    901                 mCodec.queueInputBuffer(
    902                         index,
    903                         0,  // offset
    904                         encFrameLength,  // size
    905                         mInPresentationTimeUs,
    906                         flags);
    907             }
    908 
    909             @Override
    910             public void onOutputBufferAvailable(MediaCodec codec,
    911                     int index, MediaCodec.BufferInfo info) {
    912                 if (mHelper == null) {
    913                     Log.e(TAG, "async helper not available");
    914                     return;
    915                 }
    916 
    917                 MediaEncoderOutput out = new MediaEncoderOutput();
    918 
    919                 out.buffer = new byte[info.size];
    920                 ByteBuffer outputBuffer = mCodec.getOutputBuffer(index);
    921                 outputBuffer.get(out.buffer, 0, info.size);
    922                 mOutPresentationTimeUs = info.presentationTimeUs;
    923 
    924                 String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
    925                 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    926                     logStr += " CONFIG. ";
    927                 }
    928                 if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
    929                     logStr += " KEY. ";
    930                 }
    931                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    932                     logStr += " EOS. ";
    933                 }
    934                 logStr += " Size: " + info.size;
    935                 logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
    936                         ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
    937                 Log.v(TAG, logStr);
    938 
    939                 if (mOutputFrameIndex == 0 &&
    940                         ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) == 0) ) {
    941                     throw new RuntimeException("First frame is not a sync frame.");
    942                 }
    943 
    944                 if (info.size > 0) {
    945                     mOutputFrameIndex++;
    946                     out.inPresentationTimeUs = mInPresentationTimeUs;
    947                     out.outPresentationTimeUs = mOutPresentationTimeUs;
    948                 }
    949                 mCodec.releaseOutputBuffer(index, false);
    950 
    951                 out.flags = info.flags;
    952                 out.outputGenerated = true;
    953 
    954                 if (mHelper.saveOutputFrame(out)) {
    955                     // output EOS
    956                     signalCompletion();
    957                 }
    958             }
    959 
    960             @Override
    961             public void onError(MediaCodec codec, CodecException e) {
    962                 Log.e(TAG, "onError: " + e
    963                         + ", transient " + e.isTransient()
    964                         + ", recoverable " + e.isRecoverable()
    965                         + ", error " + e.getErrorCode());
    966             }
    967 
    968             @Override
    969             public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
    970                 Log.i(TAG, "onOutputFormatChanged: " + format.toString());
    971             }
    972         };
    973 
    974         private synchronized void requestStart() throws Exception {
    975             mHandler = null;
    976             start();
    977             // Wait for Hander allocation
    978             synchronized (mThreadEvent) {
    979                 while (mHandler == null) {
    980                     mThreadEvent.wait();
    981                 }
    982             }
    983         }
    984 
    985         public void setAsyncHelper(MediaEncoderAsyncHelper helper) {
    986             mHelper = helper;
    987         }
    988 
    989         @Override
    990         public void run() {
    991             Looper.prepare();
    992             synchronized (mThreadEvent) {
    993                 mHandler = new Handler();
    994                 mThreadEvent.notify();
    995             }
    996             Looper.loop();
    997         }
    998 
    999         private void runCallable(final Callable<?> callable) throws Exception {
   1000             if (mAsync) {
   1001                 final Exception[] exception = new Exception[1];
   1002                 final CountDownLatch countDownLatch = new CountDownLatch(1);
   1003                 mHandler.post( new Runnable() {
   1004                     @Override
   1005                     public void run() {
   1006                         try {
   1007                             callable.call();
   1008                         } catch (Exception e) {
   1009                             exception[0] = e;
   1010                         } finally {
   1011                             countDownLatch.countDown();
   1012                         }
   1013                     }
   1014                 } );
   1015 
   1016                 // Wait for task completion
   1017                 countDownLatch.await();
   1018                 if (exception[0] != null) {
   1019                     throw exception[0];
   1020                 }
   1021             } else {
   1022                 callable.call();
   1023             }
   1024         }
   1025 
   1026         private synchronized void requestStop() throws Exception {
   1027             mHandler.post( new Runnable() {
   1028                 @Override
   1029                 public void run() {
   1030                     // This will run on the Looper thread
   1031                     Log.v(TAG, "MediaEncoder looper quitting");
   1032                     Looper.myLooper().quitSafely();
   1033                 }
   1034             } );
   1035             // Wait for completion
   1036             join();
   1037             mHandler = null;
   1038         }
   1039 
   1040         private void createCodecInternal(final String name,
   1041                 final MediaFormat format, final long timeout, boolean useNdk) throws Exception {
   1042             mBufferInfo = new MediaCodec.BufferInfo();
   1043             mFrameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
   1044             mTimeout = timeout;
   1045             mInputFrameIndex = 0;
   1046             mOutputFrameIndex = 0;
   1047             mInPresentationTimeUs = 0;
   1048             mOutPresentationTimeUs = 0;
   1049 
   1050             if (useNdk) {
   1051                 mCodec = new NdkMediaCodec(name);
   1052             } else {
   1053                 mCodec = new SdkMediaCodec(MediaCodec.createByCodecName(name), mAsync);
   1054             }
   1055             if (mAsync) {
   1056                 mCodec.setCallback(mCallback);
   1057             }
   1058             mCodec.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
   1059             mCodec.start();
   1060 
   1061             // get the cached input/output only in sync mode
   1062             if (!mAsync) {
   1063                 mInputBuffers = mCodec.getInputBuffers();
   1064                 mOutputBuffers = mCodec.getOutputBuffers();
   1065             }
   1066         }
   1067 
   1068         public void createCodec(int id, final String name, final MediaFormat format,
   1069                 final long timeout, boolean async, final boolean useNdk)  throws Exception {
   1070             mId = id;
   1071             mAsync = async;
   1072             if (mAsync) {
   1073                 requestStart(); // start looper thread
   1074             }
   1075             runCallable( new Callable<Void>() {
   1076                 @Override
   1077                 public Void call() throws Exception {
   1078                     createCodecInternal(name, format, timeout, useNdk);
   1079                     return null;
   1080                 }
   1081             } );
   1082         }
   1083 
   1084         private void feedInputInternal(final byte[] encFrame, final boolean inputEOS) {
   1085             mConsumedInput = false;
   1086             // Feed input
   1087             mInputBufIndex = mCodec.dequeueInputBuffer(mTimeout);
   1088 
   1089             if (mInputBufIndex >= 0) {
   1090                 ByteBuffer inputBuffer = mCodec.getInputBuffer(mInputBufIndex);
   1091                 inputBuffer.clear();
   1092                 inputBuffer.put(encFrame);
   1093                 inputBuffer.rewind();
   1094                 int encFrameLength = encFrame.length;
   1095                 int flags = 0;
   1096                 if (inputEOS) {
   1097                     encFrameLength = 0;
   1098                     flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
   1099                 }
   1100                 if (!inputEOS) {
   1101                     Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
   1102                             ". InTime: " + (mInPresentationTimeUs + 500)/1000);
   1103                     mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
   1104                     mInputFrameIndex++;
   1105                 }
   1106 
   1107                 mCodec.queueInputBuffer(
   1108                         mInputBufIndex,
   1109                         0,  // offset
   1110                         encFrameLength,  // size
   1111                         mInPresentationTimeUs,
   1112                         flags);
   1113 
   1114                 mConsumedInput = true;
   1115             } else {
   1116                 Log.v(TAG, "In " + mId + " - TRY_AGAIN_LATER");
   1117             }
   1118             mCallbackReceived = false;
   1119         }
   1120 
   1121         public boolean feedInput(final byte[] encFrame, final boolean inputEOS) throws Exception {
   1122             runCallable( new Callable<Void>() {
   1123                 @Override
   1124                 public Void call() throws Exception {
   1125                     feedInputInternal(encFrame, inputEOS);
   1126                     return null;
   1127                 }
   1128             } );
   1129             return mConsumedInput;
   1130         }
   1131 
   1132         private void getOutputInternal() {
   1133             mOutput = new MediaEncoderOutput();
   1134             mOutput.inPresentationTimeUs = mInPresentationTimeUs;
   1135             mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
   1136             mOutput.outputGenerated = false;
   1137 
   1138             // Get output from the encoder
   1139             int result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
   1140             while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
   1141                     result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
   1142                 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
   1143                     mOutputBuffers = mCodec.getOutputBuffers();
   1144                 } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
   1145                     Log.d(TAG, "Format changed: " + mCodec.getOutputFormatString());
   1146                 }
   1147                 result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
   1148             }
   1149             if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
   1150                 Log.v(TAG, "Out " + mId + " - TRY_AGAIN_LATER");
   1151             }
   1152 
   1153             if (result >= 0) {
   1154                 int outputBufIndex = result;
   1155                 mOutput.buffer = new byte[mBufferInfo.size];
   1156                 ByteBuffer outputBuffer = mCodec.getOutputBuffer(outputBufIndex);
   1157                 outputBuffer.position(mBufferInfo.offset);
   1158                 outputBuffer.get(mOutput.buffer, 0, mBufferInfo.size);
   1159                 mOutPresentationTimeUs = mBufferInfo.presentationTimeUs;
   1160 
   1161                 String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
   1162                 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
   1163                     logStr += " CONFIG. ";
   1164                 }
   1165                 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
   1166                     logStr += " KEY. ";
   1167                 }
   1168                 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
   1169                     logStr += " EOS. ";
   1170                 }
   1171                 logStr += " Size: " + mBufferInfo.size;
   1172                 logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
   1173                         ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
   1174                 Log.v(TAG, logStr);
   1175                 if (mOutputFrameIndex == 0 &&
   1176                         ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) == 0) ) {
   1177                     throw new RuntimeException("First frame is not a sync frame.");
   1178                 }
   1179 
   1180                 if (mBufferInfo.size > 0) {
   1181                     mOutputFrameIndex++;
   1182                     mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
   1183                 }
   1184                 mCodec.releaseOutputBuffer(outputBufIndex, false);
   1185 
   1186                 mOutput.flags = mBufferInfo.flags;
   1187                 mOutput.outputGenerated = true;
   1188             }
   1189             mCallbackReceived = false;
   1190         }
   1191 
   1192         public MediaEncoderOutput getOutput() throws Exception {
   1193             runCallable( new Callable<Void>() {
   1194                 @Override
   1195                 public Void call() throws Exception {
   1196                     getOutputInternal();
   1197                     return null;
   1198                 }
   1199             } );
   1200             return mOutput;
   1201         }
   1202 
   1203         public void forceSyncFrame() throws Exception {
   1204             final Bundle syncFrame = new Bundle();
   1205             syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
   1206             runCallable( new Callable<Void>() {
   1207                 @Override
   1208                 public Void call() throws Exception {
   1209                     mCodec.setParameters(syncFrame);
   1210                     return null;
   1211                 }
   1212             } );
   1213         }
   1214 
   1215         public void updateBitrate(int bitrate) throws Exception {
   1216             final Bundle bitrateUpdate = new Bundle();
   1217             bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate);
   1218             runCallable( new Callable<Void>() {
   1219                 @Override
   1220                 public Void call() throws Exception {
   1221                     mCodec.setParameters(bitrateUpdate);
   1222                     return null;
   1223                 }
   1224             } );
   1225         }
   1226 
   1227 
   1228         public void waitForBufferEvent() throws Exception {
   1229             Log.v(TAG, "----Enc" + mId + " waiting for bufferEvent");
   1230             if (mAsync) {
   1231                 synchronized (mCallbackEvent) {
   1232                     if (!mCallbackReceived) {
   1233                         mCallbackEvent.wait(1000); // wait 1 sec for a callback
   1234                         // throw an exception if callback was not received
   1235                         if (!mCallbackReceived) {
   1236                             throw new RuntimeException("MediaCodec callback was not received");
   1237                         }
   1238                     }
   1239                 }
   1240             } else {
   1241                 Thread.sleep(5);
   1242             }
   1243             Log.v(TAG, "----Waiting for bufferEvent done");
   1244         }
   1245 
   1246 
   1247         public void waitForCompletion(long timeoutMs) throws Exception {
   1248             synchronized (mCompletionEvent) {
   1249                 long timeoutExpiredMs = System.currentTimeMillis() + timeoutMs;
   1250 
   1251                 while (!mCompleted) {
   1252                     mCompletionEvent.wait(timeoutExpiredMs - System.currentTimeMillis());
   1253                     if (System.currentTimeMillis() >= timeoutExpiredMs) {
   1254                         throw new RuntimeException("encoding has timed out!");
   1255                     }
   1256                 }
   1257             }
   1258         }
   1259 
   1260         public void signalCompletion() {
   1261             synchronized (mCompletionEvent) {
   1262                 mCompleted = true;
   1263                 mCompletionEvent.notify();
   1264             }
   1265         }
   1266 
   1267         public void deleteCodec() throws Exception {
   1268             runCallable( new Callable<Void>() {
   1269                 @Override
   1270                 public Void call() throws Exception {
   1271                     mCodec.stop();
   1272                     mCodec.release();
   1273                     return null;
   1274                 }
   1275             } );
   1276             if (mAsync) {
   1277                 requestStop(); // Stop looper thread
   1278             }
   1279         }
   1280     }
   1281 
   1282     /**
   1283      * Vpx encoding loop supporting encoding single streams with an option
   1284      * to run in a looper thread and use buffer ready notification callbacks.
   1285      *
   1286      * Output stream is described by encodingParams parameters.
   1287      *
   1288      * MediaCodec will raise an IllegalStateException
   1289      * whenever vpx encoder fails to encode a frame.
   1290      *
   1291      * Color format of input file should be YUV420, and frameWidth,
   1292      * frameHeight should be supplied correctly as raw input file doesn't
   1293      * include any header data.
   1294      *
   1295      * @param streamParams  Structure with encoder parameters
   1296      * @return              Returns array of encoded frames information for each frame.
   1297      */
    protected ArrayList<MediaCodec.BufferInfo> encode(
            EncoderOutputStreamParameters streamParams) throws Exception {

        // Per-frame metadata for every encoded frame; returned to the caller
        // for later statistics computation.
        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        // Initial target bitrate; bitrateSet may request per-frame changes below.
        int bitrate = streamParams.bitrateSet[0];

        // Create minimal media format signifying desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getVpxCodecProperties(
                true, format, streamParams.forceGoogleEncoder);
        if (properties == null) {
            // No suitable encoder found for this format - nothing to do.
            return null;
        }

        // Open input/output
        InputStream yuvStream = OpenFileOrResourceId(
                streamParams.inputYuvFilename, streamParams.inputResourceId);
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Create a media format signifying desired output.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        // Convert the sync frame interval from a frame count to seconds,
        // rounding to the nearest second.
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
                streamParams.frameRate;
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);

        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame:" + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);
        MediaEncoderAsync codec = new MediaEncoderAsync();
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);

        // encode loop
        boolean sawInputEOS = false;  // no more data
        boolean consumedInputEOS = false; // EOS flag is consumed by encoder
        boolean sawOutputEOS = false;
        boolean inputConsumed = true;
        int inputFrameIndex = 0;
        int lastBitrate = bitrate;
        // YUV420 frame size: full-resolution luma plane plus two quarter-size
        // chroma planes => width * height * 3 / 2 bytes.
        int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
        byte[] srcFrame = new byte[srcFrameSize];

        while (!sawOutputEOS) {

            // Read and feed input frame
            if (!consumedInputEOS) {

                // Read new input buffers - if previous input was consumed and no EOS
                if (inputConsumed && !sawInputEOS) {
                    int bytesRead = yuvStream.read(srcFrame);

                    // Check EOS
                    if (streamParams.frameCount > 0 && inputFrameIndex >= streamParams.frameCount) {
                        sawInputEOS = true;
                        Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                    }

                    if (!sawInputEOS && bytesRead == -1) {
                        if (streamParams.frameCount == 0) {
                            // frameCount == 0: encode the input file exactly once.
                            sawInputEOS = true;
                            Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                        } else {
                            // Loop the input file until frameCount frames have
                            // been fed to the encoder.
                            yuvStream.close();
                            yuvStream = OpenFileOrResourceId(
                                    streamParams.inputYuvFilename, streamParams.inputResourceId);
                            bytesRead = yuvStream.read(srcFrame);
                        }
                    }

                    // Force sync frame if syncForceFrameInterval is set.
                    if (!sawInputEOS && inputFrameIndex > 0 &&
                            streamParams.syncForceFrameInterval > 0 &&
                            (inputFrameIndex % streamParams.syncForceFrameInterval) == 0) {
                        Log.d(TAG, "---Requesting sync frame # " + inputFrameIndex);
                        codec.forceSyncFrame();
                    }

                    // Dynamic bitrate change.
                    if (!sawInputEOS && streamParams.bitrateSet.length > inputFrameIndex) {
                        int newBitrate = streamParams.bitrateSet[inputFrameIndex];
                        if (newBitrate != lastBitrate) {
                            Log.d(TAG, "--- Requesting new bitrate " + newBitrate +
                                    " for frame " + inputFrameIndex);
                            codec.updateBitrate(newBitrate);
                            lastBitrate = newBitrate;
                        }
                    }

                    // Convert YUV420 to NV12 if necessary
                    if (properties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame = YUV420ToNV(streamParams.frameWidth, streamParams.frameHeight,
                                srcFrame);
                    }
                }

                // feedInput returns false when the codec has no free input
                // buffer; the same frame is then retried on the next iteration.
                inputConsumed = codec.feedInput(srcFrame, sawInputEOS);
                if (inputConsumed) {
                    inputFrameIndex++;
                    consumedInputEOS = sawInputEOS;
                }
            }

            // Get output from the encoder
            MediaEncoderOutput out = codec.getOutput();
            if (out.outputGenerated) {
                // Detect output EOS
                if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "----Output EOS ");
                    sawOutputEOS = true;
                }

                if (out.buffer.length > 0) {
                    // Save frame
                    ivf.writeFrame(out.buffer, out.outPresentationTimeUs);

                    // Update statistics - store presentation time delay in offset
                    long presentationTimeUsDelta = out.inPresentationTimeUs -
                            out.outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                            out.outPresentationTimeUs, out.flags);
                    bufferInfos.add(bufferInfoCopy);
                }
            }

            // If codec is not ready to accept input/output - wait for buffer ready callback
            if ((!inputConsumed || consumedInputEOS) && !out.outputGenerated) {
                codec.waitForBufferEvent();
            }
        }

        codec.deleteCodec();
        ivf.close();
        yuvStream.close();

        return bufferInfos;
    }
   1456 
   1457     /**
   1458      * Vpx encoding run in a looper thread and use buffer ready callbacks.
   1459      *
   1460      * Output stream is described by encodingParams parameters.
   1461      *
   1462      * MediaCodec will raise an IllegalStateException
   1463      * whenever vpx encoder fails to encode a frame.
   1464      *
   1465      * Color format of input file should be YUV420, and frameWidth,
   1466      * frameHeight should be supplied correctly as raw input file doesn't
   1467      * include any header data.
   1468      *
   1469      * @param streamParams  Structure with encoder parameters
   1470      * @return              Returns array of encoded frames information for each frame.
   1471      */
    protected ArrayList<MediaCodec.BufferInfo> encodeAsync(
            EncoderOutputStreamParameters streamParams) throws Exception {
        // The callback-driven path requires the codec callbacks to be serviced
        // on a dedicated looper thread.
        if (!streamParams.runInLooperThread) {
            throw new RuntimeException("encodeAsync should run with a looper thread!");
        }

        // Per-frame metadata, filled in via the async helper; returned to the caller.
        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        // Initial target bitrate.
        int bitrate = streamParams.bitrateSet[0];

        // Create minimal media format signifying desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getVpxCodecProperties(
                true, format, streamParams.forceGoogleEncoder);
        if (properties == null) {
            // No suitable encoder found for this format - nothing to do.
            return null;
        }

        // Open input/output
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Create a media format signifying desired output.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        // Convert the sync frame interval from a frame count to seconds,
        // rounding to the nearest second.
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
                streamParams.frameRate;
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);

        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame:" + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);

        MediaEncoderAsync codec = new MediaEncoderAsync();
        // The helper drives input feeding and output collection from the codec
        // callbacks, writing encoded frames to ivf and info into bufferInfos.
        MediaEncoderAsyncHelper helper = new MediaEncoderAsyncHelper(
                streamParams, properties, bufferInfos, ivf);

        codec.setAsyncHelper(helper);
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);
        // Block until the helper signals completion or the timeout expires.
        codec.waitForCompletion(DEFAULT_ENCODE_TIMEOUT_MS);

        codec.deleteCodec();
        ivf.close();

        return bufferInfos;
    }
   1537 
   1538     /**
   1539      * Vpx encoding loop supporting encoding multiple streams at a time.
   1540      * Each output stream is described by encodingParams parameters allowing
   1541      * simultaneous encoding of various resolutions, bitrates with an option to
     * control key frame and dynamic bitrate for each output stream independently.
   1543      *
   1544      * MediaCodec will raise an IllegalStateException
   1545      * whenever vpx encoder fails to encode a frame.
   1546      *
   1547      * Color format of input file should be YUV420, and frameWidth,
   1548      * frameHeight should be supplied correctly as raw input file doesn't
   1549      * include any header data.
   1550      *
   1551      * @param srcFrameWidth     Frame width of input yuv file
   1552      * @param srcFrameHeight    Frame height of input yuv file
   1553      * @param encodingParams    Encoder parameters
   1554      * @return                  Returns 2D array of encoded frames information for each stream and
   1555      *                          for each frame.
   1556      */
    protected ArrayList<ArrayList<MediaCodec.BufferInfo>> encodeSimulcast(
            int srcFrameWidth,
            int srcFrameHeight,
            ArrayList<EncoderOutputStreamParameters> encodingParams)  throws Exception {
        int numEncoders = encodingParams.size();

        // Create arrays of input/output, formats, bitrates etc
        ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos =
                new ArrayList<ArrayList<MediaCodec.BufferInfo>>(numEncoders);
        InputStream yuvStream[] = new InputStream[numEncoders];
        IvfWriter[] ivf = new IvfWriter[numEncoders];
        // NOTE(review): yuvScaled entries are never assigned in this method,
        // so they stay null; the close() below is guarded accordingly.
        FileOutputStream[] yuvScaled = new FileOutputStream[numEncoders];
        MediaFormat[] format = new MediaFormat[numEncoders];
        MediaEncoderAsync[] codec = new MediaEncoderAsync[numEncoders];
        int[] inputFrameIndex = new int[numEncoders];
        boolean[] sawInputEOS = new boolean[numEncoders];
        boolean[] consumedInputEOS = new boolean[numEncoders];
        boolean[] inputConsumed = new boolean[numEncoders];
        boolean[] bufferConsumed = new boolean[numEncoders];
        boolean[] sawOutputEOS = new boolean[numEncoders];
        byte[][] srcFrame = new byte[numEncoders][];
        boolean sawOutputEOSTotal = false;
        boolean bufferConsumedTotal = false;
        CodecProperties[] codecProperties = new CodecProperties[numEncoders];

        // Re-count numEncoders: streams with no suitable codec are skipped, so
        // only indices [0, numEncoders) of the arrays above end up populated.
        numEncoders = 0;
        for (EncoderOutputStreamParameters params : encodingParams) {
            int i = numEncoders;
            Log.d(TAG, "Source resolution: " + params.frameWidth + " x " +
                    params.frameHeight);
            int bitrate = params.bitrateSet[0];

            // Create minimal media format signifying desired output.
            format[i] = MediaFormat.createVideoFormat(
                    params.codecMimeType, params.frameWidth,
                    params.frameHeight);
            format[i].setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
            CodecProperties properties = getVpxCodecProperties(
                    true, format[i], params.forceGoogleEncoder);
            if (properties == null) {
                // No codec for this stream - skip it.
                continue;
            }

            // Check if scaled image was created
            int scale = params.frameWidth / srcFrameWidth;
            if (!mScaledImages.contains(scale)) {
                // resize image
                cacheScaledImage(params.inputYuvFilename, params.inputResourceId,
                        srcFrameWidth, srcFrameHeight,
                        params.scaledYuvFilename, params.frameWidth, params.frameHeight);
                mScaledImages.add(scale);
            }

            // Create buffer info storage
            bufferInfos.add(new ArrayList<MediaCodec.BufferInfo>());

            // Create YUV reader
            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);

            // Create IVF writer
            ivf[i] = new IvfWriter(
                    params.outputIvfFilename, params.codecMimeType,
                    params.frameWidth, params.frameHeight);

            // Frame buffer - YUV420 frame occupies width * height * 3 / 2 bytes.
            int frameSize = params.frameWidth * params.frameHeight * 3 / 2;
            srcFrame[i] = new byte[frameSize];

            // Create a media format signifying desired output.
            if (params.bitrateType == VIDEO_ControlRateConstant) {
                format[i].setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
            }
            if (params.temporalLayers > 0) {
                format[i].setInteger("ts-layers", params.temporalLayers); // 1 temporal layer
            }
            format[i].setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
            format[i].setInteger(MediaFormat.KEY_FRAME_RATE, params.frameRate);
            int syncFrameInterval = (params.syncFrameInterval + params.frameRate/2) /
                    params.frameRate; // in sec
            format[i].setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
            // Create encoder
            Log.d(TAG, "Creating encoder #" + i +" : " + properties.codecName +
                    ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
                    params.frameWidth + " x " + params.frameHeight +
                    ". Bitrate: " + bitrate + " Bitrate type: " + params.bitrateType +
                    ". Fps:" + params.frameRate + ". TS Layers: " + params.temporalLayers +
                    ". Key frame:" + syncFrameInterval * params.frameRate +
                    ". Force keyFrame: " + params.syncForceFrameInterval);
            Log.d(TAG, "  Format: " + format[i]);
            Log.d(TAG, "  Output ivf:" + params.outputIvfFilename);

            // Create encoder
            codec[i] = new MediaEncoderAsync();
            codec[i].createCodec(i, properties.codecName, format[i],
                    params.timeoutDequeue, params.runInLooperThread, params.useNdk);
            codecProperties[i] = new CodecProperties(properties.codecName, properties.colorFormat);

            inputConsumed[i] = true;
            ++numEncoders;
        }
        if (numEncoders == 0) {
            Log.i(TAG, "no suitable encoders found for any of the streams");
            return null;
        }

        while (!sawOutputEOSTotal) {
            // Feed input buffer to all encoders
            for (int i = 0; i < numEncoders; i++) {
                bufferConsumed[i] = false;
                if (consumedInputEOS[i]) {
                    // This encoder already received its EOS input.
                    continue;
                }

                EncoderOutputStreamParameters params = encodingParams.get(i);
                // Read new input buffers - if previous input was consumed and no EOS
                if (inputConsumed[i] && !sawInputEOS[i]) {
                    int bytesRead = yuvStream[i].read(srcFrame[i]);

                    // Check EOS
                    if (params.frameCount > 0 && inputFrameIndex[i] >= params.frameCount) {
                        sawInputEOS[i] = true;
                        Log.d(TAG, "---Enc" + i +
                                ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
                    }

                    if (!sawInputEOS[i] && bytesRead == -1) {
                        if (params.frameCount == 0) {
                            // frameCount == 0: encode the input file exactly once.
                            sawInputEOS[i] = true;
                            Log.d(TAG, "---Enc" + i +
                                    ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
                        } else {
                            // Loop the input file until frameCount frames were fed.
                            yuvStream[i].close();
                            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
                            bytesRead = yuvStream[i].read(srcFrame[i]);
                        }
                    }

                    // Convert YUV420 to NV12 if necessary
                    if (codecProperties[i].colorFormat !=
                            CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame[i] =
                            YUV420ToNV(params.frameWidth, params.frameHeight, srcFrame[i]);
                    }
                }

                // feedInput returns false when the codec has no free input
                // buffer; the same frame is retried on the next iteration.
                inputConsumed[i] = codec[i].feedInput(srcFrame[i], sawInputEOS[i]);
                if (inputConsumed[i]) {
                    inputFrameIndex[i]++;
                    consumedInputEOS[i] = sawInputEOS[i];
                    bufferConsumed[i] = true;
                }

            }

            // Get output from all encoders
            for (int i = 0; i < numEncoders; i++) {
                if (sawOutputEOS[i]) {
                    continue;
                }

                MediaEncoderOutput out = codec[i].getOutput();
                if (out.outputGenerated) {
                    bufferConsumed[i] = true;
                    // Detect output EOS
                    if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.d(TAG, "----Enc" + i + ". Output EOS ");
                        sawOutputEOS[i] = true;
                    }

                    if (out.buffer.length > 0) {
                        // Save frame
                        ivf[i].writeFrame(out.buffer, out.outPresentationTimeUs);

                        // Update statistics - store presentation time delay in offset
                        long presentationTimeUsDelta = out.inPresentationTimeUs -
                                out.outPresentationTimeUs;
                        MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                        bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                                out.outPresentationTimeUs, out.flags);
                        bufferInfos.get(i).add(bufferInfoCopy);
                    }
                }
            }

            // If codec is not ready to accept input/output - wait for buffer ready callback
            bufferConsumedTotal = false;
            for (boolean bufferConsumedCurrent : bufferConsumed) {
                bufferConsumedTotal |= bufferConsumedCurrent;
            }
            if (!bufferConsumedTotal) {
                // Pick the encoder to wait for
                for (int i = 0; i < numEncoders; i++) {
                    if (!bufferConsumed[i] && !sawOutputEOS[i]) {
                        codec[i].waitForBufferEvent();
                        break;
                    }
                }
            }

            // Check if EOS happened for all encoders
            sawOutputEOSTotal = true;
            for (boolean sawOutputEOSStream : sawOutputEOS) {
                sawOutputEOSTotal &= sawOutputEOSStream;
            }
        }

        // Release codecs and close all streams.
        for (int i = 0; i < numEncoders; i++) {
            codec[i].deleteCodec();
            ivf[i].close();
            yuvStream[i].close();
            if (yuvScaled[i] != null) {
                yuvScaled[i].close();
            }
        }

        return bufferInfos;
    }
   1774 
   1775     /**
   1776      * Some encoding statistics.
   1777      */
   1778     protected class VpxEncodingStatistics {
   1779         VpxEncodingStatistics() {
   1780             mBitrates = new ArrayList<Integer>();
   1781             mFrames = new ArrayList<Integer>();
   1782             mKeyFrames = new ArrayList<Integer>();
   1783             mMinimumKeyFrameInterval = Integer.MAX_VALUE;
   1784         }
   1785 
   1786         public ArrayList<Integer> mBitrates;// Bitrate values for each second of the encoded stream.
   1787         public ArrayList<Integer> mFrames; // Number of frames in each second of the encoded stream.
   1788         public int mAverageBitrate;         // Average stream bitrate.
   1789         public ArrayList<Integer> mKeyFrames;// Stores the position of key frames in a stream.
   1790         public int mAverageKeyFrameInterval; // Average key frame interval.
   1791         public int mMaximumKeyFrameInterval; // Maximum key frame interval.
   1792         public int mMinimumKeyFrameInterval; // Minimum key frame interval.
   1793     }
   1794 
   1795     /**
   1796      * Calculates average bitrate and key frame interval for the encoded streams.
   1797      * Output mBitrates field will contain bitrate values for every second
   1798      * of the encoded stream.
   1799      * Average stream bitrate will be stored in mAverageBitrate field.
   1800      * mKeyFrames array will contain the position of key frames in the encoded stream and
     * mAverageKeyFrameInterval - the average key frame interval.
   1802      */
   1803     protected VpxEncodingStatistics computeEncodingStatistics(int encoderId,
   1804             ArrayList<MediaCodec.BufferInfo> bufferInfos ) {
   1805         VpxEncodingStatistics statistics = new VpxEncodingStatistics();
   1806 
   1807         int totalSize = 0;
   1808         int frames = 0;
   1809         int framesPerSecond = 0;
   1810         int totalFrameSizePerSecond = 0;
   1811         int maxFrameSize = 0;
   1812         int currentSecond;
   1813         int nextSecond = 0;
   1814         String keyFrameList = "  IFrame List: ";
   1815         String bitrateList = "  Bitrate list: ";
   1816         String framesList = "  FPS list: ";
   1817 
   1818 
   1819         for (int j = 0; j < bufferInfos.size(); j++) {
   1820             MediaCodec.BufferInfo info = bufferInfos.get(j);
   1821             currentSecond = (int)(info.presentationTimeUs / 1000000);
   1822             boolean lastFrame = (j == bufferInfos.size() - 1);
   1823             if (!lastFrame) {
   1824                 nextSecond = (int)(bufferInfos.get(j+1).presentationTimeUs / 1000000);
   1825             }
   1826 
   1827             totalSize += info.size;
   1828             totalFrameSizePerSecond += info.size;
   1829             maxFrameSize = Math.max(maxFrameSize, info.size);
   1830             framesPerSecond++;
   1831             frames++;
   1832 
   1833             // Update the bitrate statistics if the next frame will
   1834             // be for the next second
   1835             if (lastFrame || nextSecond > currentSecond) {
   1836                 int currentBitrate = totalFrameSizePerSecond * 8;
   1837                 bitrateList += (currentBitrate + " ");
   1838                 framesList += (framesPerSecond + " ");
   1839                 statistics.mBitrates.add(currentBitrate);
   1840                 statistics.mFrames.add(framesPerSecond);
   1841                 totalFrameSizePerSecond = 0;
   1842                 framesPerSecond = 0;
   1843             }
   1844 
   1845             // Update key frame statistics.
   1846             if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
   1847                 statistics.mKeyFrames.add(j);
   1848                 keyFrameList += (j + "  ");
   1849             }
   1850         }
   1851         int duration = (int)(bufferInfos.get(bufferInfos.size() - 1).presentationTimeUs / 1000);
   1852         duration = (duration + 500) / 1000;
   1853         statistics.mAverageBitrate = (int)(((long)totalSize * 8) / duration);
   1854         Log.d(TAG, "Statistics for encoder # " + encoderId);
   1855         // Calculate average key frame interval in frames.
   1856         int keyFrames = statistics.mKeyFrames.size();
   1857         if (keyFrames > 1) {
   1858             statistics.mAverageKeyFrameInterval =
   1859                     statistics.mKeyFrames.get(keyFrames - 1) - statistics.mKeyFrames.get(0);
   1860             statistics.mAverageKeyFrameInterval =
   1861                     Math.round((float)statistics.mAverageKeyFrameInterval / (keyFrames - 1));
   1862             for (int j = 1; j < keyFrames; j++) {
   1863                 int keyFrameInterval =
   1864                         statistics.mKeyFrames.get(j) - statistics.mKeyFrames.get(j - 1);
   1865                 statistics.mMaximumKeyFrameInterval =
   1866                         Math.max(statistics.mMaximumKeyFrameInterval, keyFrameInterval);
   1867                 statistics.mMinimumKeyFrameInterval =
   1868                         Math.min(statistics.mMinimumKeyFrameInterval, keyFrameInterval);
   1869             }
   1870             Log.d(TAG, "  Key frame intervals: Max: " + statistics.mMaximumKeyFrameInterval +
   1871                     ". Min: " + statistics.mMinimumKeyFrameInterval +
   1872                     ". Avg: " + statistics.mAverageKeyFrameInterval);
   1873         }
   1874         Log.d(TAG, "  Frames: " + frames + ". Duration: " + duration +
   1875                 ". Total size: " + totalSize + ". Key frames: " + keyFrames);
   1876         Log.d(TAG, keyFrameList);
   1877         Log.d(TAG, bitrateList);
   1878         Log.d(TAG, framesList);
   1879         Log.d(TAG, "  Bitrate average: " + statistics.mAverageBitrate);
   1880         Log.d(TAG, "  Maximum frame size: " + maxFrameSize);
   1881 
   1882         return statistics;
   1883     }
   1884 
    /**
     * Convenience overload for the single-encoder case.
     * Delegates to the two-argument overload using encoder id 0.
     */
    protected VpxEncodingStatistics computeEncodingStatistics(
            ArrayList<MediaCodec.BufferInfo> bufferInfos ) {
        return computeEncodingStatistics(0, bufferInfos);
    }
   1889 
   1890     protected ArrayList<VpxEncodingStatistics> computeSimulcastEncodingStatistics(
   1891             ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos) {
   1892         int numCodecs = bufferInfos.size();
   1893         ArrayList<VpxEncodingStatistics> statistics = new ArrayList<VpxEncodingStatistics>();
   1894 
   1895         for (int i = 0; i < numCodecs; i++) {
   1896             VpxEncodingStatistics currentStatistics =
   1897                     computeEncodingStatistics(i, bufferInfos.get(i));
   1898             statistics.add(currentStatistics);
   1899         }
   1900         return statistics;
   1901     }
   1902 
   1903     /**
   1904      * Calculates maximum latency for encoder/decoder based on buffer info array
   1905      * generated either by encoder or decoder.
   1906      */
   1907     protected int maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos) {
   1908         int maxValue = 0;
   1909         for (MediaCodec.BufferInfo bufferInfo : bufferInfos) {
   1910             maxValue = Math.max(maxValue,  bufferInfo.offset);
   1911         }
   1912         maxValue = (maxValue + 500) / 1000; // mcs -> ms
   1913         return maxValue;
   1914     }
   1915 
   1916     /**
   1917      * Decoding PSNR statistics.
   1918      */
   1919     protected class VpxDecodingStatistics {
   1920         VpxDecodingStatistics() {
   1921             mMinimumPSNR = Integer.MAX_VALUE;
   1922         }
   1923         public double mAveragePSNR;
   1924         public double mMinimumPSNR;
   1925     }
   1926 
   1927     /**
   1928      * Calculates PSNR value between two video frames.
   1929      */
   1930     private double computePSNR(byte[] data0, byte[] data1) {
   1931         long squareError = 0;
   1932         assertTrue(data0.length == data1.length);
   1933         int length = data0.length;
   1934         for (int i = 0 ; i < length; i++) {
   1935             int diff = ((int)data0[i] & 0xff) - ((int)data1[i] & 0xff);
   1936             squareError += diff * diff;
   1937         }
   1938         double meanSquareError = (double)squareError / length;
   1939         double psnr = 10 * Math.log10((double)255 * 255 / meanSquareError);
   1940         return psnr;
   1941     }
   1942 
   1943     /**
   1944      * Calculates average and minimum PSNR values between
   1945      * set of reference and decoded video frames.
   1946      * Runs PSNR calculation for the full duration of the decoded data.
   1947      */
   1948     protected VpxDecodingStatistics computeDecodingStatistics(
   1949             String referenceYuvFilename,
   1950             int referenceYuvRawId,
   1951             String decodedYuvFilename,
   1952             int width,
   1953             int height) throws Exception {
   1954         VpxDecodingStatistics statistics = new VpxDecodingStatistics();
   1955         InputStream referenceStream =
   1956                 OpenFileOrResourceId(referenceYuvFilename, referenceYuvRawId);
   1957         InputStream decodedStream = new FileInputStream(decodedYuvFilename);
   1958 
   1959         int ySize = width * height;
   1960         int uvSize = width * height / 4;
   1961         byte[] yRef = new byte[ySize];
   1962         byte[] yDec = new byte[ySize];
   1963         byte[] uvRef = new byte[uvSize];
   1964         byte[] uvDec = new byte[uvSize];
   1965 
   1966         int frames = 0;
   1967         double averageYPSNR = 0;
   1968         double averageUPSNR = 0;
   1969         double averageVPSNR = 0;
   1970         double minimumYPSNR = Integer.MAX_VALUE;
   1971         double minimumUPSNR = Integer.MAX_VALUE;
   1972         double minimumVPSNR = Integer.MAX_VALUE;
   1973         int minimumPSNRFrameIndex = 0;
   1974 
   1975         while (true) {
   1976             // Calculate Y PSNR.
   1977             int bytesReadRef = referenceStream.read(yRef);
   1978             int bytesReadDec = decodedStream.read(yDec);
   1979             if (bytesReadDec == -1) {
   1980                 break;
   1981             }
   1982             if (bytesReadRef == -1) {
   1983                 // Reference file wrapping up
   1984                 referenceStream.close();
   1985                 referenceStream =
   1986                         OpenFileOrResourceId(referenceYuvFilename, referenceYuvRawId);
   1987                 bytesReadRef = referenceStream.read(yRef);
   1988             }
   1989             double curYPSNR = computePSNR(yRef, yDec);
   1990             averageYPSNR += curYPSNR;
   1991             minimumYPSNR = Math.min(minimumYPSNR, curYPSNR);
   1992             double curMinimumPSNR = curYPSNR;
   1993 
   1994             // Calculate U PSNR.
   1995             bytesReadRef = referenceStream.read(uvRef);
   1996             bytesReadDec = decodedStream.read(uvDec);
   1997             double curUPSNR = computePSNR(uvRef, uvDec);
   1998             averageUPSNR += curUPSNR;
   1999             minimumUPSNR = Math.min(minimumUPSNR, curUPSNR);
   2000             curMinimumPSNR = Math.min(curMinimumPSNR, curUPSNR);
   2001 
   2002             // Calculate V PSNR.
   2003             bytesReadRef = referenceStream.read(uvRef);
   2004             bytesReadDec = decodedStream.read(uvDec);
   2005             double curVPSNR = computePSNR(uvRef, uvDec);
   2006             averageVPSNR += curVPSNR;
   2007             minimumVPSNR = Math.min(minimumVPSNR, curVPSNR);
   2008             curMinimumPSNR = Math.min(curMinimumPSNR, curVPSNR);
   2009 
   2010             // Frame index for minimum PSNR value - help to detect possible distortions
   2011             if (curMinimumPSNR < statistics.mMinimumPSNR) {
   2012                 statistics.mMinimumPSNR = curMinimumPSNR;
   2013                 minimumPSNRFrameIndex = frames;
   2014             }
   2015 
   2016             String logStr = String.format(Locale.US, "PSNR #%d: Y: %.2f. U: %.2f. V: %.2f",
   2017                     frames, curYPSNR, curUPSNR, curVPSNR);
   2018             Log.v(TAG, logStr);
   2019 
   2020             frames++;
   2021         }
   2022 
   2023         averageYPSNR /= frames;
   2024         averageUPSNR /= frames;
   2025         averageVPSNR /= frames;
   2026         statistics.mAveragePSNR = (4 * averageYPSNR + averageUPSNR + averageVPSNR) / 6;
   2027 
   2028         Log.d(TAG, "PSNR statistics for " + frames + " frames.");
   2029         String logStr = String.format(Locale.US,
   2030                 "Average PSNR: Y: %.1f. U: %.1f. V: %.1f. Average: %.1f",
   2031                 averageYPSNR, averageUPSNR, averageVPSNR, statistics.mAveragePSNR);
   2032         Log.d(TAG, logStr);
   2033         logStr = String.format(Locale.US,
   2034                 "Minimum PSNR: Y: %.1f. U: %.1f. V: %.1f. Overall: %.1f at frame %d",
   2035                 minimumYPSNR, minimumUPSNR, minimumVPSNR,
   2036                 statistics.mMinimumPSNR, minimumPSNRFrameIndex);
   2037         Log.d(TAG, logStr);
   2038 
   2039         referenceStream.close();
   2040         decodedStream.close();
   2041         return statistics;
   2042     }
   2043 }
   2044