/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.content.Context;
import android.content.res.Resources;
import android.media.MediaCodec;
import android.media.MediaCodec.CodecException;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.os.Looper;
import android.os.Handler;
import android.test.AndroidTestCase;
import android.util.Log;
import android.media.cts.R;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Locale;
import java.util.ArrayList;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;

/**
 * Verification test for vpx encoder and decoder.
 *
 * A raw yv12 stream is encoded at various settings and written to an IVF
 * file. Encoded stream bitrate and key frame interval are checked against target values.
 * The stream is later decoded by the decoder to verify frames are decodable and to
 * calculate PSNR values for various bitrates.
 */
public class VpxCodecTestBase extends AndroidTestCase {

    protected static final String TAG = "VPxCodecTestBase";
    protected static final String VP8_MIME = MediaFormat.MIMETYPE_VIDEO_VP8;
    protected static final String VP9_MIME = MediaFormat.MIMETYPE_VIDEO_VP9;
    private static final String GOOGLE_CODEC_PREFIX = "omx.google.";
    protected static final String SDCARD_DIR =
            Environment.getExternalStorageDirectory().getAbsolutePath();

    // Default timeout for MediaCodec buffer dequeue - 200 ms.
    protected static final long DEFAULT_DEQUEUE_TIMEOUT_US = 200000;
    // Default timeout for MediaEncoderAsync - 30 sec.
    protected static final long DEFAULT_ENCODE_TIMEOUT_MS = 30000;
    // Default sync frame interval in frames (zero means allow the encoder to auto-select
    // key frame interval).
    private static final int SYNC_FRAME_INTERVAL = 0;
    // Video bitrate types - the values match OMX_Video_ControlRateVariable and
    // OMX_Video_ControlRateConstant from OMX_Video.h.
    protected static final int VIDEO_ControlRateVariable = 1;
    protected static final int VIDEO_ControlRateConstant = 2;
    // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
    // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
    private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
    // Allowable color formats supported by codec - in order of preference.
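    // (COLOR_FormatYUV420Planar is the I420-style layout - a full-size Y plane
    // followed by quarter-size U and V planes; the SemiPlanar entries are
    // NV12-style - a Y plane followed by an interleaved UV plane.)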
    private static final int[] mSupportedColorList = {
            CodecCapabilities.COLOR_FormatYUV420Planar,
            CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
            CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
            COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
    };
    // Scaled image cache list - contains scale factors, for which up-scaled frames
    // were calculated and were written to yuv file.
    ArrayList<Integer> mScaledImages = new ArrayList<Integer>();

    private Resources mResources;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
        mResources = mContext.getResources();
    }

    /**
     * VPx codec properties generated by the getVpxCodecProperties() function.
     */
    private class CodecProperties {
        CodecProperties(String codecName, int colorFormat) {
            this.codecName = codecName;
            this.colorFormat = colorFormat;
        }
        public boolean isGoogleCodec() {
            return codecName.toLowerCase().startsWith(GOOGLE_CODEC_PREFIX);
        }

        public final String codecName; // OpenMax component name for VPx codec.
        public final int colorFormat;  // Color format supported by codec.
    }

    /**
     * Function to find a VPx codec.
     *
     * Iterates through the list of available codecs and tries to find a
     * VPX codec that supports either the YUV420 planar or the NV12 color format.
     * If the forceGoogleCodec parameter is set to true, the function always returns
     * the Google VPX codec.
     * If the forceGoogleCodec parameter is set to false, the function looks for a
     * platform-specific VPX codec first. If no platform-specific codec exists, it
     * falls back to the Google VPX codec.
     *
     * @param isEncoder Flag if encoder is requested.
     * @param format Format for which a codec is searched.
     * @param forceGoogleCodec Forces use of the Google codec.
     */
    private CodecProperties getVpxCodecProperties(
            boolean isEncoder,
            MediaFormat format,
            boolean forceGoogleCodec) throws Exception {
        CodecProperties codecProperties = null;
        String mime = format.getString(MediaFormat.KEY_MIME);

        // Loop through the list of omx components in case a platform-specific codec
        // is requested.
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
            if (isEncoder != codecInfo.isEncoder()) {
                continue;
            }
            Log.v(TAG, codecInfo.getName());
            // TODO: remove dependence on Google codecs from the test.
            // Skip non-Google codecs when the Google codec is forced.
            boolean isGoogleCodec =
                    codecInfo.getName().toLowerCase().startsWith(GOOGLE_CODEC_PREFIX);
            if (!isGoogleCodec && forceGoogleCodec) {
                continue;
            }

            for (String type : codecInfo.getSupportedTypes()) {
                if (!type.equalsIgnoreCase(mime)) {
                    continue;
                }
                CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(type);
                if (!capabilities.isFormatSupported(format)) {
                    continue;
                }

                // Get candidate codec properties.
                Log.v(TAG, "Found candidate codec " + codecInfo.getName());
                for (int colorFormat : capabilities.colorFormats) {
                    Log.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
                }

                // Check supported color formats.
                for (int supportedColorFormat : mSupportedColorList) {
                    for (int codecColorFormat : capabilities.colorFormats) {
                        if (codecColorFormat == supportedColorFormat) {
                            codecProperties = new CodecProperties(codecInfo.getName(),
                                    codecColorFormat);
                            Log.v(TAG, "Found target codec " + codecProperties.codecName +
                                    ". Color: 0x" + Integer.toHexString(codecColorFormat));
                            // Return the first HW codec found.
                            if (!isGoogleCodec) {
                                return codecProperties;
                            }
                        }
                    }
                }
            }
        }
        if (codecProperties == null) {
            Log.i(TAG, "no suitable " + (forceGoogleCodec ? "google " : "")
                    + (isEncoder ? "encoder " : "decoder ") + "found for " + format);
        }
        return codecProperties;
    }

    /**
     * Parameters for encoded video stream.
     */
    protected class EncoderOutputStreamParameters {
        // Name of raw YUV420 input file. When the value of this parameter
        // is set to null, the input file descriptor from the inputResourceId
        // parameter is used instead.
        public String inputYuvFilename;
        // Name of scaled YUV420 input file.
        public String scaledYuvFilename;
        // File descriptor for the raw input file (YUV420). Used only if
        // inputYuvFilename parameter is null.
        int inputResourceId;
        // Name of the IVF file to write the encoded bitstream to.
        public String outputIvfFilename;
        // Mime type of the encoded content.
        public String codecMimeType;
        // Force use of the Google VPx encoder.
        boolean forceGoogleEncoder;
        // Number of frames to encode.
        int frameCount;
        // Frame rate of input file in frames per second.
        int frameRate;
        // Encoded frame width.
        public int frameWidth;
        // Encoded frame height.
        public int frameHeight;
        // Encoding bitrate array in bits/second for every frame. If the array length
        // is shorter than the total number of frames, the last value is re-used for
        // all remaining frames. For constant bitrate encoding, a single-element
        // array can be used, with its first element set to the target bitrate value.
        public int[] bitrateSet;
        // Encoding bitrate type - VBR or CBR.
        public int bitrateType;
        // Number of temporal layers.
        public int temporalLayers;
        // Desired key frame interval - the codec is asked to generate key frames
        // at a period defined by this parameter.
        public int syncFrameInterval;
        // Optional parameter - forced key frame interval. Used to
        // explicitly request the codec to generate key frames using the
        // MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME parameter.
        public int syncForceFrameInterval;
        // Buffer dequeue timeout.
        long timeoutDequeue;
        // Flag whether the encoder should run in a looper thread.
        boolean runInLooperThread;
        // Flag whether to use NdkMediaCodec.
        boolean useNdk;
    }

    /**
     * Generates an array of default parameters for encoder output stream based on
     * upscaling value.
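     *
     * For example, a 320x240 30 fps source with encodeSeconds = 9 and
     * resolutionScales = {1, 2} yields two parameter sets - 320x240 and
     * 640x480 (each dimension is capped at 1280x720) - with
     * frameCount = 9 * 30 = 270 frames apiece.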
     */
    protected ArrayList<EncoderOutputStreamParameters> getDefaultEncodingParameterList(
            String inputYuvName,
            String outputIvfBaseName,
            String codecMimeType,
            int encodeSeconds,
            int[] resolutionScales,
            int frameWidth,
            int frameHeight,
            int frameRate,
            int bitrateMode,
            int[] bitrates,
            boolean syncEncoding) {
        assertTrue(resolutionScales.length == bitrates.length);
        int numCodecs = resolutionScales.length;
        ArrayList<EncoderOutputStreamParameters> outputParameters =
                new ArrayList<EncoderOutputStreamParameters>(numCodecs);
        for (int i = 0; i < numCodecs; i++) {
            EncoderOutputStreamParameters params = new EncoderOutputStreamParameters();
            if (inputYuvName != null) {
                params.inputYuvFilename = SDCARD_DIR + File.separator + inputYuvName;
            } else {
                params.inputYuvFilename = null;
            }
            params.scaledYuvFilename = SDCARD_DIR + File.separator +
                    outputIvfBaseName + resolutionScales[i] + ".yuv";
            params.inputResourceId = R.raw.football_qvga;
            params.codecMimeType = codecMimeType;
            String codecSuffix = VP8_MIME.equals(codecMimeType) ? "vp8" : "vp9";
            params.outputIvfFilename = SDCARD_DIR + File.separator +
                    outputIvfBaseName + resolutionScales[i] + "_" + codecSuffix + ".ivf";
            params.forceGoogleEncoder = false;
            params.frameCount = encodeSeconds * frameRate;
            params.frameRate = frameRate;
            params.frameWidth = Math.min(frameWidth * resolutionScales[i], 1280);
            params.frameHeight = Math.min(frameHeight * resolutionScales[i], 720);
            params.bitrateSet = new int[1];
            params.bitrateSet[0] = bitrates[i];
            params.bitrateType = bitrateMode;
            params.temporalLayers = 0;
            params.syncFrameInterval = SYNC_FRAME_INTERVAL;
            params.syncForceFrameInterval = 0;
            if (syncEncoding) {
                params.timeoutDequeue = DEFAULT_DEQUEUE_TIMEOUT_US;
                params.runInLooperThread = false;
            } else {
                params.timeoutDequeue = 0;
                params.runInLooperThread = true;
            }
            outputParameters.add(params);
        }
        return outputParameters;
    }

    protected EncoderOutputStreamParameters getDefaultEncodingParameters(
            String inputYuvName,
            String outputIvfBaseName,
            String codecMimeType,
            int encodeSeconds,
            int frameWidth,
            int frameHeight,
            int frameRate,
            int bitrateMode,
            int bitrate,
            boolean syncEncoding) {
        int[] scaleValues = { 1 };
        int[] bitrates = { bitrate };
        return getDefaultEncodingParameterList(
                inputYuvName,
                outputIvfBaseName,
                codecMimeType,
                encodeSeconds,
                scaleValues,
                frameWidth,
                frameHeight,
                frameRate,
                bitrateMode,
                bitrates,
                syncEncoding).get(0);
    }

    /**
     * Converts (interleaves) YUV420 planar to NV12.
     * Assumes packed, macroblock-aligned frame with no cropping
     * (visible/coded row length == stride).
     */
    private static byte[] YUV420ToNV(int width, int height, byte[] yuv) {
        byte[] nv = new byte[yuv.length];
        // Y plane we just copy.
        System.arraycopy(yuv, 0, nv, 0, width * height);

        // U & V plane we interleave.
        int u_offset = width * height;
        int v_offset = u_offset + u_offset / 4;
        int nv_offset = width * height;
        for (int i = 0; i < width * height / 4; i++) {
            nv[nv_offset++] = yuv[u_offset++];
            nv[nv_offset++] = yuv[v_offset++];
        }
        return nv;
    }

    /**
     * Converts (de-interleaves) NV12 to YUV420 planar.
     * Stride may be greater than width, slice height may be greater than height.
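     *
     * The NV12 input is assumed to hold a Y plane of stride * sliceHeight
     * bytes followed by rows of interleaved UV pairs spaced stride bytes
     * apart; the output is tightly packed YUV420 planar
     * (width * height * 3 / 2 bytes).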
     */
    private static byte[] NV12ToYUV420(int width, int height,
            int stride, int sliceHeight, byte[] nv12) {
        byte[] yuv = new byte[width * height * 3 / 2];

        // Y plane we just copy.
        for (int i = 0; i < height; i++) {
            System.arraycopy(nv12, i * stride, yuv, i * width, width);
        }

        // U & V plane - de-interleave.
        int u_offset = width * height;
        int v_offset = u_offset + u_offset / 4;
        int nv_offset;
        for (int i = 0; i < height / 2; i++) {
            nv_offset = stride * (sliceHeight + i);
            for (int j = 0; j < width / 2; j++) {
                yuv[u_offset++] = nv12[nv_offset++];
                yuv[v_offset++] = nv12[nv_offset++];
            }
        }
        return yuv;
    }

    /**
     * Packs YUV420 frame by moving it to a smaller size buffer with stride and slice
     * height equal to the original frame width and height.
     */
    private static byte[] PackYUV420(int width, int height,
            int stride, int sliceHeight, byte[] src) {
        byte[] dst = new byte[width * height * 3 / 2];
        // Y copy.
        for (int i = 0; i < height; i++) {
            System.arraycopy(src, i * stride, dst, i * width, width);
        }
        // U and V copy.
        int u_src_offset = stride * sliceHeight;
        int v_src_offset = u_src_offset + u_src_offset / 4;
        int u_dst_offset = width * height;
        int v_dst_offset = u_dst_offset + u_dst_offset / 4;
        for (int i = 0; i < height / 2; i++) {
            System.arraycopy(src, u_src_offset + i * (stride / 2),
                    dst, u_dst_offset + i * (width / 2), width / 2);
            System.arraycopy(src, v_src_offset + i * (stride / 2),
                    dst, v_dst_offset + i * (width / 2), width / 2);
        }
        return dst;
    }


    private static void imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride,
            byte[] dst, int dstByteOffset, int dstWidth, int dstHeight) {
        for (int i = 0; i < dstHeight/2 - 1; i++) {
            int dstOffset0 = 2 * i * dstWidth + dstByteOffset;
            int dstOffset1 = dstOffset0 + dstWidth;
            int srcOffset0 = i * srcStride + srcByteOffset;
            int srcOffset1 = srcOffset0 + srcStride;
            int pixel00 = (int)src[srcOffset0++] & 0xff;
            int pixel10 = (int)src[srcOffset1++] & 0xff;
            for (int j = 0; j < dstWidth/2 - 1; j++) {
                int pixel01 = (int)src[srcOffset0++] & 0xff;
                int pixel11 = (int)src[srcOffset1++] & 0xff;
                dst[dstOffset0++] = (byte)pixel00;
                dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
                dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
                dst[dstOffset1++] = (byte)((pixel00 + pixel01 + pixel10 + pixel11 + 2) / 4);
                pixel00 = pixel01;
                pixel10 = pixel11;
            }
            // last column
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
            dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
        }

        // last row
        int dstOffset0 = (dstHeight - 2) * dstWidth + dstByteOffset;
        int dstOffset1 = dstOffset0 + dstWidth;
        int srcOffset0 = (dstHeight/2 - 1) * srcStride + srcByteOffset;
        int pixel00 = (int)src[srcOffset0++] & 0xff;
        for (int j = 0; j < dstWidth/2 - 1; j++) {
            int pixel01 = (int)src[srcOffset0++] & 0xff;
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
            dst[dstOffset1++] = (byte)pixel00;
            dst[dstOffset1++] = (byte)((pixel00 + pixel01 + 1) / 2);
            pixel00 = pixel01;
        }
        // the very last pixel - bottom right
        dst[dstOffset0++] = (byte)pixel00;
        dst[dstOffset0++] = (byte)pixel00;
        dst[dstOffset1++] = (byte)pixel00;
        dst[dstOffset1++] = (byte)pixel00;
    }

    /**
     * Up-scale image.
     * Scale factor is defined by source and destination width ratio.
     * Only 1:2 and 1:4 up-scaling is supported for now.
     * For 640x480 -> 1280x720 conversion only the top 640x360 part of the original
     * image is scaled.
     */
    private static byte[] imageScale(byte[] src, int srcWidth, int srcHeight,
            int dstWidth, int dstHeight) throws Exception {
        int srcYSize = srcWidth * srcHeight;
        int dstYSize = dstWidth * dstHeight;
        byte[] dst = null;
        if (dstWidth == 2 * srcWidth && dstHeight <= 2 * srcHeight) {
            // 1:2 upscale
            dst = new byte[dstWidth * dstHeight * 3 / 2];
            imageUpscale1To2(src, 0, srcWidth,
                    dst, 0, dstWidth, dstHeight); // Y
            imageUpscale1To2(src, srcYSize, srcWidth / 2,
                    dst, dstYSize, dstWidth / 2, dstHeight / 2); // U
            imageUpscale1To2(src, srcYSize * 5 / 4, srcWidth / 2,
                    dst, dstYSize * 5 / 4, dstWidth / 2, dstHeight / 2); // V
        } else if (dstWidth == 4 * srcWidth && dstHeight <= 4 * srcHeight) {
            // 1:4 upscale - in two steps
            int midWidth = 2 * srcWidth;
            int midHeight = 2 * srcHeight;
            byte[] midBuffer = imageScale(src, srcWidth, srcHeight, midWidth, midHeight);
            dst = imageScale(midBuffer, midWidth, midHeight, dstWidth, dstHeight);

        } else {
            throw new RuntimeException("Can not find proper scaling function");
        }

        return dst;
    }

    private void cacheScaledImage(
            String srcYuvFilename, int srcResourceId, int srcFrameWidth, int srcFrameHeight,
            String dstYuvFilename, int dstFrameWidth, int dstFrameHeight) throws Exception {
        InputStream srcStream = OpenFileOrResourceId(srcYuvFilename, srcResourceId);
        FileOutputStream dstFile = new FileOutputStream(dstYuvFilename, false);
        int srcFrameSize = srcFrameWidth * srcFrameHeight * 3 / 2;
        byte[] srcFrame = new byte[srcFrameSize];
        byte[] dstFrame = null;
        Log.d(TAG, "Scale to " + dstFrameWidth + " x " + dstFrameHeight + ". -> " +
                dstYuvFilename);
        while (true) {
            int bytesRead = srcStream.read(srcFrame);
            if (bytesRead != srcFrame.length) {
                break;
            }
            if (dstFrameWidth == srcFrameWidth && dstFrameHeight == srcFrameHeight) {
                dstFrame = srcFrame;
            } else {
                dstFrame = imageScale(srcFrame, srcFrameWidth, srcFrameHeight,
                        dstFrameWidth, dstFrameHeight);
            }
            dstFile.write(dstFrame);
        }
        srcStream.close();
        dstFile.close();
    }


    /**
     * A basic check if an encoded stream is decodable.
     *
     * The most basic confirmation we can get about a frame
     * being properly encoded is trying to decode it.
     * (Especially in realtime mode, encode output is non-
     * deterministic, therefore a more thorough check like
     * an md5 sum comparison wouldn't work.)
     *
     * Indeed, MediaCodec will raise an IllegalStateException
     * whenever the vpx decoder fails to decode a frame, and
     * this test uses that fact to verify the bitstream.
     *
     * @param inputIvfFilename The name of the IVF file containing the encoded bitstream.
     * @param outputYuvFilename The name of the output YUV file (optional).
     * @param codecMimeType Mime type of the encoded stream (VP8 or VP9).
     * @param frameRate Frame rate of input file in frames per second.
     * @param forceGoogleDecoder Force use of the Google VPx decoder.
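     * @return List of BufferInfo entries, one per decoded frame, with the
     *         input-to-output presentation time delta stored in the offset
     *         field; null if no suitable decoder is found.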
     */
    protected ArrayList<MediaCodec.BufferInfo> decode(
            String inputIvfFilename,
            String outputYuvFilename,
            String codecMimeType,
            int frameRate,
            boolean forceGoogleDecoder) throws Exception {
        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();

        // Open input/output.
        IvfReader ivf = new IvfReader(inputIvfFilename);
        int frameWidth = ivf.getWidth();
        int frameHeight = ivf.getHeight();
        int frameCount = ivf.getFrameCount();
        int frameStride = frameWidth;
        int frameSliceHeight = frameHeight;
        assertTrue(frameWidth > 0);
        assertTrue(frameHeight > 0);
        assertTrue(frameCount > 0);

        // Create decoder.
        MediaFormat format = MediaFormat.createVideoFormat(
                codecMimeType, ivf.getWidth(), ivf.getHeight());
        CodecProperties properties = getVpxCodecProperties(
                false /* encoder */, format, forceGoogleDecoder);
        if (properties == null) {
            ivf.close();
            return null;
        }
        int frameColorFormat = properties.colorFormat;
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);

        FileOutputStream yuv = null;
        if (outputYuvFilename != null) {
            yuv = new FileOutputStream(outputYuvFilename, false);
        }

        Log.d(TAG, "Creating decoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(frameColorFormat) +
                ". " + frameWidth + " x " + frameHeight);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  In: " + inputIvfFilename + ". Out: " + outputYuvFilename);
        MediaCodec decoder = MediaCodec.createByCodecName(properties.codecName);
        decoder.configure(format,
                null,  // surface
                null,  // crypto
                0);    // flags
        decoder.start();

        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

        // decode loop
        int inputFrameIndex = 0;
        int outputFrameIndex = 0;
        long inPresentationTimeUs = 0;
        long outPresentationTimeUs = 0;
        boolean sawOutputEOS = false;
        boolean sawInputEOS = false;

        while (!sawOutputEOS) {
            if (!sawInputEOS) {
                int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_DEQUEUE_TIMEOUT_US);
                if (inputBufIndex >= 0) {
                    byte[] frame = ivf.readFrame(inputFrameIndex);

                    if (inputFrameIndex == frameCount - 1) {
                        Log.d(TAG, "  Input EOS for frame # " + inputFrameIndex);
                        sawInputEOS = true;
                    }

                    inputBuffers[inputBufIndex].clear();
                    inputBuffers[inputBufIndex].put(frame);
                    inputBuffers[inputBufIndex].rewind();
                    inPresentationTimeUs = (inputFrameIndex * 1000000) / frameRate;

                    decoder.queueInputBuffer(
                            inputBufIndex,
                            0,  // offset
                            frame.length,
                            inPresentationTimeUs,
                            sawInputEOS ?
                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                    inputFrameIndex++;
                }
            }

            int result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
            while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
                    result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    outputBuffers = decoder.getOutputBuffers();
                } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // Process format change
                    format = decoder.getOutputFormat();
                    frameWidth = format.getInteger(MediaFormat.KEY_WIDTH);
                    frameHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
                    frameColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                    Log.d(TAG, "Decoder output format change. Color: 0x" +
                            Integer.toHexString(frameColorFormat));
                    Log.d(TAG, "Format: " + format.toString());

                    // Parse frame and slice height from undocumented values
                    if (format.containsKey("stride")) {
                        frameStride = format.getInteger("stride");
                    } else {
                        frameStride = frameWidth;
                    }
                    if (format.containsKey("slice-height")) {
                        frameSliceHeight = format.getInteger("slice-height");
                    } else {
                        frameSliceHeight = frameHeight;
                    }
                    Log.d(TAG, "Frame stride and slice height: " + frameStride +
                            " x " + frameSliceHeight);
                    frameStride = Math.max(frameWidth, frameStride);
                    frameSliceHeight = Math.max(frameHeight, frameSliceHeight);
                }
                result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
            }
            if (result >= 0) {
                int outputBufIndex = result;
                outPresentationTimeUs = bufferInfo.presentationTimeUs;
                Log.v(TAG, "Writing buffer # " + outputFrameIndex +
                        ". Size: " + bufferInfo.size +
                        ". InTime: " + (inPresentationTimeUs + 500)/1000 +
                        ". OutTime: " + (outPresentationTimeUs + 500)/1000);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                    Log.d(TAG, "  Output EOS for frame # " + outputFrameIndex);
                }

                if (bufferInfo.size > 0) {
                    // Save decoder output to yuv file.
                    if (yuv != null) {
                        byte[] frame = new byte[bufferInfo.size];
                        outputBuffers[outputBufIndex].position(bufferInfo.offset);
                        outputBuffers[outputBufIndex].get(frame, 0, bufferInfo.size);
                        // Convert NV12 to YUV420 if necessary.
                        if (frameColorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                            frame = NV12ToYUV420(frameWidth, frameHeight,
                                    frameStride, frameSliceHeight, frame);
                        }
                        int writeLength = Math.min(frameWidth * frameHeight * 3 / 2,
                                frame.length);
                        // Pack frame if necessary.
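                        // (A stride-aligned decoder output can be larger than the
                        // visible frame; PackYUV420 keeps only the width x height
                        // pixels so the saved YUV file stays tightly packed.)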
                        if (writeLength < frame.length &&
                                (frameStride > frameWidth || frameSliceHeight > frameHeight)) {
                            frame = PackYUV420(frameWidth, frameHeight,
                                    frameStride, frameSliceHeight, frame);
                        }
                        yuv.write(frame, 0, writeLength);
                    }
                    outputFrameIndex++;

                    // Update statistics - store presentation time delay in offset
                    long presentationTimeUsDelta = inPresentationTimeUs - outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int)presentationTimeUsDelta, bufferInfo.size,
                            outPresentationTimeUs, bufferInfo.flags);
                    bufferInfos.add(bufferInfoCopy);
                }
                decoder.releaseOutputBuffer(outputBufIndex, false);
            }
        }
        decoder.stop();
        decoder.release();
        ivf.close();
        if (yuv != null) {
            yuv.close();
        }

        return bufferInfos;
    }


    /**
     * Helper function to return an InputStream from either a filename (if set)
     * or a resource id (if the filename is not set).
     */
    private InputStream OpenFileOrResourceId(String filename, int resourceId) throws Exception {
        if (filename != null) {
            return new FileInputStream(filename);
        }
        return mResources.openRawResource(resourceId);
    }

    /**
     * Results of frame encoding.
     */
    protected class MediaEncoderOutput {
        public long inPresentationTimeUs;
        public long outPresentationTimeUs;
        public boolean outputGenerated;
        public int flags;
        public byte[] buffer;
    }

    protected class MediaEncoderAsyncHelper {
        private final EncoderOutputStreamParameters mStreamParams;
        private final CodecProperties mProperties;
        private final ArrayList<MediaCodec.BufferInfo> mBufferInfos;
        private final IvfWriter mIvf;
        private final byte[] mSrcFrame;

        private InputStream mYuvStream;
        private int mInputFrameIndex;

        MediaEncoderAsyncHelper(
                EncoderOutputStreamParameters streamParams,
                CodecProperties properties,
                ArrayList<MediaCodec.BufferInfo> bufferInfos,
                IvfWriter ivf)
                throws Exception {
            mStreamParams = streamParams;
            mProperties = properties;
            mBufferInfos = bufferInfos;
            mIvf = ivf;

            int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
            mSrcFrame = new byte[srcFrameSize];

            mYuvStream = OpenFileOrResourceId(
                    streamParams.inputYuvFilename, streamParams.inputResourceId);
        }

        public byte[] getInputFrame() {
            // Check EOS
            if (mStreamParams.frameCount == 0
                    || (mStreamParams.frameCount > 0
                            && mInputFrameIndex >= mStreamParams.frameCount)) {
                Log.d(TAG, "---Sending EOS empty frame for frame # " + mInputFrameIndex);
                return null;
            }

            try {
                int bytesRead = mYuvStream.read(mSrcFrame);

                if (bytesRead == -1) {
                    // rewind to beginning of file
                    mYuvStream.close();
                    mYuvStream = OpenFileOrResourceId(
                            mStreamParams.inputYuvFilename, mStreamParams.inputResourceId);
                    bytesRead = mYuvStream.read(mSrcFrame);
                }
            } catch (Exception e) {
                Log.e(TAG, "Failed to read YUV file.");
                return null;
            }
            mInputFrameIndex++;

            // Convert YUV420 to NV12 if necessary
            if (mProperties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                return YUV420ToNV(mStreamParams.frameWidth, mStreamParams.frameHeight,
                        mSrcFrame);
            } else {
                return mSrcFrame;
            }
        }

        public boolean saveOutputFrame(MediaEncoderOutput out) {
            if (out.outputGenerated) {
                if (out.buffer.length > 0) {
                    // Save frame
                    try {
                        mIvf.writeFrame(out.buffer, out.outPresentationTimeUs);
                    } catch (Exception e) {
                        Log.d(TAG, "Failed to write frame");
                        return true;
                    }

                    // Update statistics - store presentation time delay in offset
                    long presentationTimeUsDelta = out.inPresentationTimeUs -
                            out.outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                            out.outPresentationTimeUs, out.flags);
                    mBufferInfos.add(bufferInfoCopy);
                }
                // Detect output EOS
                if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "----Output EOS ");
                    return true;
                }
            }
            return false;
        }
    }

    /**
     * Video encoder wrapper class.
     * Allows running the encoder either in the caller's thread or in a looper thread
     * using buffer dequeue ready notification callbacks.
     *
     * The feedInput() function is used to send a raw video frame to the encoder input.
     * When the encoder is configured to run in async mode, the function will run in a
     * looper thread. An encoded frame can be retrieved by calling the getOutput() function.
     */
    protected class MediaEncoderAsync extends Thread {
        private int mId;
        private MediaCodecWrapper mCodec;
        private ByteBuffer[] mInputBuffers;
        private ByteBuffer[] mOutputBuffers;
        private int mInputFrameIndex;
        private int mOutputFrameIndex;
        private int mInputBufIndex;
        private int mFrameRate;
        private long mTimeout;
        private MediaCodec.BufferInfo mBufferInfo;
        private long mInPresentationTimeUs;
        private long mOutPresentationTimeUs;
        private boolean mAsync;
        // Flag indicating if input frame was consumed by the encoder in feedInput() call.
        private boolean mConsumedInput;
        // Result of frame encoding returned by getOutput() call.
        private MediaEncoderOutput mOutput;
        // Object used to signal that looper thread has started and Handler instance associated
        // with looper thread has been allocated.
        private final Object mThreadEvent = new Object();
        // Object used to signal that MediaCodec buffer dequeue notification callback
        // was received.
        private final Object mCallbackEvent = new Object();
        private Handler mHandler;
        private boolean mCallbackReceived;
        private MediaEncoderAsyncHelper mHelper;
        private final Object mCompletionEvent = new Object();
        private boolean mCompleted;

        private MediaCodec.Callback mCallback = new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(MediaCodec codec, int index) {
                if (mHelper == null) {
                    Log.e(TAG, "async helper not available");
                    return;
                }

                byte[] encFrame = mHelper.getInputFrame();
                boolean inputEOS = (encFrame == null);

                int encFrameLength = 0;
                int flags = 0;
                if (inputEOS) {
                    flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                } else {
                    encFrameLength = encFrame.length;

                    ByteBuffer byteBuffer = mCodec.getInputBuffer(index);
                    byteBuffer.put(encFrame);
                    byteBuffer.rewind();

                    mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;

                    Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
                            ". InTime: " + (mInPresentationTimeUs + 500)/1000);

                    mInputFrameIndex++;
                }

                mCodec.queueInputBuffer(
                        index,
                        0,  // offset
                        encFrameLength,  // size
                        mInPresentationTimeUs,
                        flags);
            }

            @Override
            public void onOutputBufferAvailable(MediaCodec codec,
                    int index, MediaCodec.BufferInfo info) {
                if (mHelper == null) {
                    Log.e(TAG, "async helper not available");
                    return;
                }

                MediaEncoderOutput out = new MediaEncoderOutput();

                out.buffer = new byte[info.size];
                ByteBuffer outputBuffer = mCodec.getOutputBuffer(index);
                outputBuffer.get(out.buffer, 0, info.size);
                mOutPresentationTimeUs = info.presentationTimeUs;

                String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    logStr += " CONFIG. ";
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
                    logStr += " KEY. ";
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    logStr += " EOS. ";
                }
                logStr += " Size: " + info.size;
                logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
                        ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
                Log.v(TAG, logStr);

                if (mOutputFrameIndex == 0 &&
                        ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) == 0)) {
                    throw new RuntimeException("First frame is not a sync frame.");
                }

                if (info.size > 0) {
                    mOutputFrameIndex++;
                    out.inPresentationTimeUs = mInPresentationTimeUs;
                    out.outPresentationTimeUs = mOutPresentationTimeUs;
                }
                mCodec.releaseOutputBuffer(index, false);

                out.flags = info.flags;
                out.outputGenerated = true;

                if (mHelper.saveOutputFrame(out)) {
                    // output EOS
                    signalCompletion();
                }
            }

            @Override
            public void onError(MediaCodec codec, CodecException e) {
                Log.e(TAG, "onError: " + e
                        + ", transient " + e.isTransient()
                        + ", recoverable " + e.isRecoverable()
                        + ", error " + e.getErrorCode());
            }

            @Override
            public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
                Log.i(TAG, "onOutputFormatChanged: " + format.toString());
            }
        };

        private synchronized void requestStart() throws Exception {
            mHandler = null;
            start();
            // Wait for Handler allocation
            synchronized (mThreadEvent) {
                while (mHandler == null) {
                    mThreadEvent.wait();
                }
            }
        }

        public void setAsyncHelper(MediaEncoderAsyncHelper helper) {
            mHelper = helper;
        }

        @Override
        public void run() {
            Looper.prepare();
            synchronized (mThreadEvent) {
                mHandler = new Handler();
                mThreadEvent.notify();
            }
            Looper.loop();
        }

        private void runCallable(final Callable<?> callable) throws Exception {
            if (mAsync) {
                final Exception[] exception = new Exception[1];
                final CountDownLatch countDownLatch = new CountDownLatch(1);
                mHandler.post( new Runnable() {
                    @Override
                    public void run() {
                        try {
                            callable.call();
                        } catch (Exception e) {
                            exception[0] = e;
                        } finally {
                            countDownLatch.countDown();
                        }
                    }
                } );

                // Wait for task completion
                countDownLatch.await();
                if (exception[0] != null) {
                    throw exception[0];
                }
            } else {
                callable.call();
            }
        }

        private synchronized void requestStop() throws Exception {
            mHandler.post( new Runnable() {
                @Override
                public void run() {
                    // This will run on the Looper thread
                    Log.v(TAG, "MediaEncoder looper quitting");
                    Looper.myLooper().quitSafely();
                }
            } );
            // Wait for completion
            join();
            mHandler = null;
        }

        private void createCodecInternal(final String name,
                final MediaFormat format, final long timeout, boolean useNdk) throws Exception {
            mBufferInfo = new MediaCodec.BufferInfo();
            mFrameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
            mTimeout = timeout;
            mInputFrameIndex = 0;
            mOutputFrameIndex = 0;
            mInPresentationTimeUs = 0;
            mOutPresentationTimeUs = 0;

            if (useNdk) {
                mCodec = new NdkMediaCodec(name);
            } else {
                mCodec = new SdkMediaCodec(MediaCodec.createByCodecName(name), mAsync);
            }
            if (mAsync) {
                mCodec.setCallback(mCallback);
            }
            mCodec.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mCodec.start();

            // get the cached input/output buffers only in sync mode
            if (!mAsync) {
                mInputBuffers = mCodec.getInputBuffers();
                mOutputBuffers = mCodec.getOutputBuffers();
            }
        }

        public void createCodec(int id, final String name, final MediaFormat format,
                final long timeout, boolean async, final boolean useNdk) throws Exception {
            mId = id;
            mAsync = async;
            if (mAsync) {
                requestStart(); // start looper thread
            }
            runCallable( new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    createCodecInternal(name, format, timeout, useNdk);
                    return null;
                }
            } );
        }

        private void feedInputInternal(final byte[] encFrame, final boolean inputEOS) {
            mConsumedInput = false;
            // Feed input
            mInputBufIndex = mCodec.dequeueInputBuffer(mTimeout);

            if (mInputBufIndex >= 0) {
                ByteBuffer inputBuffer = mCodec.getInputBuffer(mInputBufIndex);
                inputBuffer.clear();
                inputBuffer.put(encFrame);
                inputBuffer.rewind();
                int encFrameLength = encFrame.length;
                int flags = 0;
                if (inputEOS) {
                    encFrameLength = 0;
                    flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                }
                if (!inputEOS) {
                    mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
                    Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
                            ". InTime: " + (mInPresentationTimeUs + 500)/1000);
                    mInputFrameIndex++;
                }

                mCodec.queueInputBuffer(
                        mInputBufIndex,
                        0,  // offset
                        encFrameLength,  // size
                        mInPresentationTimeUs,
                        flags);

                mConsumedInput = true;
            } else {
                Log.v(TAG, "In " + mId + " - TRY_AGAIN_LATER");
            }
            mCallbackReceived = false;
        }

        public boolean feedInput(final byte[] encFrame, final boolean inputEOS) throws Exception {
            runCallable( new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    feedInputInternal(encFrame, inputEOS);
                    return null;
                }
            } );
            return mConsumedInput;
        }

        private void getOutputInternal() {
            mOutput = new MediaEncoderOutput();
            mOutput.inPresentationTimeUs = mInPresentationTimeUs;
            mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
            mOutput.outputGenerated = false;

            // Get output from the encoder
            int result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
            while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
                    result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    mOutputBuffers = mCodec.getOutputBuffers();
                } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    Log.d(TAG, "Format changed: " + mCodec.getOutputFormatString());
                }
                result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
            }
            if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.v(TAG, "Out " + mId + " - TRY_AGAIN_LATER");
            }

            if (result >= 0) {
                int outputBufIndex = result;
                mOutput.buffer = new byte[mBufferInfo.size];
                ByteBuffer outputBuffer = mCodec.getOutputBuffer(outputBufIndex);
                outputBuffer.position(mBufferInfo.offset);
                outputBuffer.get(mOutput.buffer, 0, mBufferInfo.size);
                mOutPresentationTimeUs = mBufferInfo.presentationTimeUs;

                String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    logStr += " CONFIG. ";
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
                    logStr += " KEY. ";
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    logStr += " EOS. ";
                }
                logStr += " Size: " + mBufferInfo.size;
                logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
                        ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
                Log.v(TAG, logStr);
                if (mOutputFrameIndex == 0 &&
                        ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) == 0)) {
                    throw new RuntimeException("First frame is not a sync frame.");
                }

                if (mBufferInfo.size > 0) {
                    mOutputFrameIndex++;
                    mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
                }
                mCodec.releaseOutputBuffer(outputBufIndex, false);

                mOutput.flags = mBufferInfo.flags;
                mOutput.outputGenerated = true;
            }
            mCallbackReceived = false;
        }

        public MediaEncoderOutput getOutput() throws Exception {
            runCallable( new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    getOutputInternal();
                    return null;
                }
            } );
            return mOutput;
        }

        public void forceSyncFrame() throws Exception {
            final Bundle syncFrame = new Bundle();
            syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
            runCallable( new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    mCodec.setParameters(syncFrame);
                    return null;
                }
            } );
        }

        public void updateBitrate(int bitrate) throws Exception {
            final Bundle bitrateUpdate = new Bundle();
            bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate);
            runCallable( new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    mCodec.setParameters(bitrateUpdate);
                    return null;
                }
            } );
        }


        public void waitForBufferEvent() throws Exception {
            Log.v(TAG, "----Enc" + mId + " waiting for bufferEvent");
            if (mAsync) {
                synchronized (mCallbackEvent) {
                    if (!mCallbackReceived) {
                        mCallbackEvent.wait(1000); // wait 1 sec for a callback
                        // throw an exception if callback was not received
                        if (!mCallbackReceived) {
                            throw new RuntimeException("MediaCodec callback was not received");
                        }
                    }
                }
            } else {
                Thread.sleep(5);
            }
            Log.v(TAG, "----Waiting for bufferEvent done");
        }


        public void waitForCompletion(long timeoutMs) throws Exception {
            synchronized (mCompletionEvent) {
                long timeoutExpiredMs = System.currentTimeMillis() + timeoutMs;

                while (!mCompleted) {
                    mCompletionEvent.wait(timeoutExpiredMs - System.currentTimeMillis());
                    if (System.currentTimeMillis() >= timeoutExpiredMs) {
                        throw new RuntimeException("encoding has timed out!");
                    }
                }
            }
        }

        public void signalCompletion() {
            synchronized (mCompletionEvent) {
                mCompleted = true;
                mCompletionEvent.notify();
            }
        }

        public void deleteCodec() throws Exception {
            runCallable( new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    mCodec.stop();
                    mCodec.release();
                    return null;
                }
            } );
            if (mAsync) {
                requestStop(); // Stop looper thread
            }
        }
    }

    /**
     * Vpx encoding loop supporting encoding single streams with an option
     * to run in a looper thread and use buffer ready notification callbacks.
     *
     * Output stream is described by encodingParams parameters.
     *
     * MediaCodec will raise an IllegalStateException
     * whenever the vpx encoder fails to encode a frame.
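     * (Whether the loop below runs synchronously in the caller's thread or posts
     * every codec call to a looper thread is controlled by
     * streamParams.runInLooperThread.)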
     *
     * Color format of input file should be YUV420, and frameWidth,
     * frameHeight should be supplied correctly as raw input file doesn't
     * include any header data.
     *
     * @param streamParams Structure with encoder parameters
     * @return Returns array of encoded frames information for each frame.
     */
    protected ArrayList<MediaCodec.BufferInfo> encode(
            EncoderOutputStreamParameters streamParams) throws Exception {

        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        Log.d(TAG, "Source resolution: " + streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        int bitrate = streamParams.bitrateSet[0];

        // Create minimal media format signifying desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getVpxCodecProperties(
                true, format, streamParams.forceGoogleEncoder);
        if (properties == null) {
            return null;
        }

        // Open input/output
        InputStream yuvStream = OpenFileOrResourceId(
                streamParams.inputYuvFilename, streamParams.inputResourceId);
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Create a media format signifying desired output.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // temporal layers
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
                streamParams.frameRate;
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);

        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat) + " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame:" + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);
        MediaEncoderAsync codec = new MediaEncoderAsync();
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);

        // encode loop
        boolean sawInputEOS = false;  // no more data
        boolean consumedInputEOS = false; // EOS flag is consumed by the encoder
        boolean sawOutputEOS = false;
        boolean inputConsumed = true;
        int inputFrameIndex = 0;
        int lastBitrate = bitrate;
        int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
        byte[] srcFrame = new byte[srcFrameSize];

        while (!sawOutputEOS) {

            // Read and feed input frame
            if (!consumedInputEOS) {

                // Read new input buffers - if previous input was consumed and no EOS
                if (inputConsumed && !sawInputEOS) {
                    int bytesRead = yuvStream.read(srcFrame);

                    // Check EOS
                    if (streamParams.frameCount > 0 &&
                            inputFrameIndex >= streamParams.frameCount) {
                        sawInputEOS = true;
                        Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                    }

                    if (!sawInputEOS && bytesRead == -1) {
                        if (streamParams.frameCount == 0) {
                            sawInputEOS = true;
                            Log.d(TAG, "---Sending EOS empty frame for frame # " +
                                    inputFrameIndex);
                        } else {
                            yuvStream.close();
                            yuvStream = OpenFileOrResourceId(
                                    streamParams.inputYuvFilename, streamParams.inputResourceId);
                            bytesRead = yuvStream.read(srcFrame);
                        }
                    }

                    // Force a sync frame if syncForceFrameInterval is set.
                    if (!sawInputEOS && inputFrameIndex > 0 &&
                            streamParams.syncForceFrameInterval > 0 &&
                            (inputFrameIndex % streamParams.syncForceFrameInterval) == 0) {
                        Log.d(TAG, "---Requesting sync frame # " + inputFrameIndex);
                        codec.forceSyncFrame();
                    }

                    // Dynamic bitrate change.
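                    // (bitrateSet holds a per-frame target; the encoder is only
                    // poked via PARAMETER_KEY_VIDEO_BITRATE when the value for this
                    // frame differs from the last one requested.)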
                    if (!sawInputEOS && streamParams.bitrateSet.length > inputFrameIndex) {
                        int newBitrate = streamParams.bitrateSet[inputFrameIndex];
                        if (newBitrate != lastBitrate) {
                            Log.d(TAG, "--- Requesting new bitrate " + newBitrate +
                                    " for frame " + inputFrameIndex);
                            codec.updateBitrate(newBitrate);
                            lastBitrate = newBitrate;
                        }
                    }

                    // Convert YUV420 to NV12 if necessary
                    if (properties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame = YUV420ToNV(streamParams.frameWidth, streamParams.frameHeight,
                                srcFrame);
                    }
                }

                inputConsumed = codec.feedInput(srcFrame, sawInputEOS);
                if (inputConsumed) {
                    inputFrameIndex++;
                    consumedInputEOS = sawInputEOS;
                }
            }

            // Get output from the encoder
            MediaEncoderOutput out = codec.getOutput();
            if (out.outputGenerated) {
                // Detect output EOS
                if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "----Output EOS ");
                    sawOutputEOS = true;
                }

                if (out.buffer.length > 0) {
                    // Save frame
                    ivf.writeFrame(out.buffer, out.outPresentationTimeUs);

                    // Update statistics - store presentation time delay in offset
                    long presentationTimeUsDelta = out.inPresentationTimeUs -
                            out.outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                            out.outPresentationTimeUs, out.flags);
                    bufferInfos.add(bufferInfoCopy);
                }
            }

            // If codec is not ready to accept input/output - wait for buffer ready callback
            if ((!inputConsumed || consumedInputEOS) && !out.outputGenerated) {
                codec.waitForBufferEvent();
            }
        }

        codec.deleteCodec();
        ivf.close();
        yuvStream.close();

        return bufferInfos;
    }

    /**
     * Vpx encoding run in a looper thread using buffer ready callbacks.
     *
     * Output stream is described by encodingParams parameters.
     *
     * MediaCodec will raise an IllegalStateException
     * whenever the vpx encoder fails to encode a frame.
     *
     * Color format of input file should be YUV420, and frameWidth,
     * frameHeight should be supplied correctly as raw input file doesn't
     * include any header data.
     *
     * @param streamParams Structure with encoder parameters
     * @return Returns array of encoded frames information for each frame.
     */
    protected ArrayList<MediaCodec.BufferInfo> encodeAsync(
            EncoderOutputStreamParameters streamParams) throws Exception {
        if (!streamParams.runInLooperThread) {
            throw new RuntimeException("encodeAsync should run with a looper thread!");
        }

        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        Log.d(TAG, "Source resolution: " + streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        int bitrate = streamParams.bitrateSet[0];

        // Create minimal media format signifying desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getVpxCodecProperties(
                true, format, streamParams.forceGoogleEncoder);
        if (properties == null) {
            return null;
        }

        // Open input/output
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Create a media format signifying desired output.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // temporal layers
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
                streamParams.frameRate;
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);

        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat) + " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame:" + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);

        MediaEncoderAsync codec = new MediaEncoderAsync();
        MediaEncoderAsyncHelper helper = new MediaEncoderAsyncHelper(
                streamParams, properties, bufferInfos, ivf);

        codec.setAsyncHelper(helper);
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);
        codec.waitForCompletion(DEFAULT_ENCODE_TIMEOUT_MS);

        codec.deleteCodec();
        ivf.close();

        return bufferInfos;
    }

    /**
     * Vpx encoding loop supporting encoding multiple streams at a time.
     * Each output stream is described by encodingParams parameters, allowing
     * simultaneous encoding of various resolutions and bitrates, with an option to
     * control key frame and dynamic bitrate for each output stream independently.
     *
     * MediaCodec will raise an IllegalStateException
     * whenever the vpx encoder fails to encode a frame.
     *
     * Color format of input file should be YUV420, and frameWidth,
     * frameHeight should be supplied correctly as raw input file doesn't
     * include any header data.
     *
     * @param srcFrameWidth Frame width of input yuv file
     * @param srcFrameHeight Frame height of input yuv file
     * @param encodingParams Encoder parameters
     * @return Returns 2D array of encoded frames information for each stream and
     *         for each frame
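     *         (the offset field of each BufferInfo again carries the
     *         input-to-output presentation time delta); null if no suitable
     *         encoder is found for any of the streams.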
     */
    protected ArrayList<ArrayList<MediaCodec.BufferInfo>> encodeSimulcast(
            int srcFrameWidth,
            int srcFrameHeight,
            ArrayList<EncoderOutputStreamParameters> encodingParams) throws Exception {
        int numEncoders = encodingParams.size();

        // Create arrays of input/output, formats, bitrates etc.
        ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos =
                new ArrayList<ArrayList<MediaCodec.BufferInfo>>(numEncoders);
        InputStream yuvStream[] = new InputStream[numEncoders];
        IvfWriter[] ivf = new IvfWriter[numEncoders];
        FileOutputStream[] yuvScaled = new FileOutputStream[numEncoders];
        MediaFormat[] format = new MediaFormat[numEncoders];
        MediaEncoderAsync[] codec = new MediaEncoderAsync[numEncoders];
        int[] inputFrameIndex = new int[numEncoders];
        boolean[] sawInputEOS = new boolean[numEncoders];
        boolean[] consumedInputEOS = new boolean[numEncoders];
        boolean[] inputConsumed = new boolean[numEncoders];
        boolean[] bufferConsumed = new boolean[numEncoders];
        boolean[] sawOutputEOS = new boolean[numEncoders];
        byte[][] srcFrame = new byte[numEncoders][];
        boolean sawOutputEOSTotal = false;
        boolean bufferConsumedTotal = false;
        CodecProperties[] codecProperties = new CodecProperties[numEncoders];

        numEncoders = 0;
        for (EncoderOutputStreamParameters params : encodingParams) {
            int i = numEncoders;
            Log.d(TAG, "Source resolution: " + params.frameWidth + " x " +
                    params.frameHeight);
            int bitrate = params.bitrateSet[0];

            // Create minimal media format signifying desired output.
            format[i] = MediaFormat.createVideoFormat(
                    params.codecMimeType, params.frameWidth,
                    params.frameHeight);
            format[i].setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
            CodecProperties properties = getVpxCodecProperties(
                    true, format[i], params.forceGoogleEncoder);
            if (properties == null) {
                continue;
            }

            // Check if scaled image was created
            int scale = params.frameWidth / srcFrameWidth;
            if (!mScaledImages.contains(scale)) {
                // resize image
                cacheScaledImage(params.inputYuvFilename, params.inputResourceId,
                        srcFrameWidth, srcFrameHeight,
                        params.scaledYuvFilename, params.frameWidth, params.frameHeight);
                mScaledImages.add(scale);
            }

            // Create buffer info storage
            bufferInfos.add(new ArrayList<MediaCodec.BufferInfo>());

            // Create YUV reader
            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);

            // Create IVF writer
            ivf[i] = new IvfWriter(
                    params.outputIvfFilename, params.codecMimeType,
                    params.frameWidth, params.frameHeight);

            // Frame buffer
            int frameSize = params.frameWidth * params.frameHeight * 3 / 2;
            srcFrame[i] = new byte[frameSize];

            // Create a media format signifying desired output.
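            // ("bitrate-mode" is the same key as MediaFormat.KEY_BITRATE_MODE; the
            // VIDEO_ControlRate* constants above match the framework's
            // BITRATE_MODE_VBR/BITRATE_MODE_CBR values.)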

            // Create a media format signifying desired output.
            if (params.bitrateType == VIDEO_ControlRateConstant) {
                format[i].setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
            }
            if (params.temporalLayers > 0) {
                format[i].setInteger("ts-layers", params.temporalLayers); // temporal layer count
            }
            format[i].setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
            format[i].setInteger(MediaFormat.KEY_FRAME_RATE, params.frameRate);
            int syncFrameInterval = (params.syncFrameInterval + params.frameRate / 2) /
                    params.frameRate; // in seconds
            format[i].setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);

            // Log encoder parameters.
            Log.d(TAG, "Creating encoder #" + i + " : " + properties.codecName +
                    ". Color format: 0x" + Integer.toHexString(properties.colorFormat) + " : " +
                    params.frameWidth + " x " + params.frameHeight +
                    ". Bitrate: " + bitrate + " Bitrate type: " + params.bitrateType +
                    ". Fps:" + params.frameRate + ". TS Layers: " + params.temporalLayers +
                    ". Key frame:" + syncFrameInterval * params.frameRate +
                    ". Force keyFrame: " + params.syncForceFrameInterval);
            Log.d(TAG, "  Format: " + format[i]);
            Log.d(TAG, "  Output ivf:" + params.outputIvfFilename);

            // Create encoder.
            codec[i] = new MediaEncoderAsync();
            codec[i].createCodec(i, properties.codecName, format[i],
                    params.timeoutDequeue, params.runInLooperThread, params.useNdk);
            codecProperties[i] = new CodecProperties(properties.codecName,
                    properties.colorFormat);

            inputConsumed[i] = true;
            ++numEncoders;
        }
        if (numEncoders == 0) {
            Log.i(TAG, "no suitable encoders found for any of the streams");
            return null;
        }

        while (!sawOutputEOSTotal) {
            // Feed input buffers to all encoders.
            for (int i = 0; i < numEncoders; i++) {
                bufferConsumed[i] = false;
                if (consumedInputEOS[i]) {
                    continue;
                }

                EncoderOutputStreamParameters params = encodingParams.get(i);
                // Read a new input buffer - if the previous input was consumed and no EOS.
                if (inputConsumed[i] && !sawInputEOS[i]) {
                    int bytesRead = yuvStream[i].read(srcFrame[i]);

                    // Check EOS.
                    if (params.frameCount > 0 && inputFrameIndex[i] >= params.frameCount) {
                        sawInputEOS[i] = true;
                        Log.d(TAG, "---Enc" + i +
                                ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
                    }

                    if (!sawInputEOS[i] && bytesRead == -1) {
                        if (params.frameCount == 0) {
                            sawInputEOS[i] = true;
                            Log.d(TAG, "---Enc" + i +
                                    ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
                        } else {
                            // frameCount > 0 but the file ended - rewind the input
                            // stream and keep feeding frames until frameCount is reached.
                            yuvStream[i].close();
                            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
                            bytesRead = yuvStream[i].read(srcFrame[i]);
                        }
                    }

                    // Convert YUV420 planar to NV12 if the codec needs a semi-planar format.
                    if (codecProperties[i].colorFormat !=
                            CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame[i] =
                                YUV420ToNV(params.frameWidth, params.frameHeight, srcFrame[i]);
                    }
                }

                inputConsumed[i] = codec[i].feedInput(srcFrame[i], sawInputEOS[i]);
                if (inputConsumed[i]) {
                    inputFrameIndex[i]++;
                    consumedInputEOS[i] = sawInputEOS[i];
                    bufferConsumed[i] = true;
                }
            }

            // Get output from all encoders.
            for (int i = 0; i < numEncoders; i++) {
                if (sawOutputEOS[i]) {
                    continue;
                }

                MediaEncoderOutput out = codec[i].getOutput();
                if (out.outputGenerated) {
                    bufferConsumed[i] = true;
                    // Detect output EOS.
                    if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.d(TAG, "----Enc" + i + ". Output EOS ");
                        sawOutputEOS[i] = true;
                    }

                    if (out.buffer.length > 0) {
                        // Save the frame.
                        ivf[i].writeFrame(out.buffer, out.outPresentationTimeUs);

                        // Update statistics - store the presentation time delay
                        // in the offset field.
                        long presentationTimeUsDelta = out.inPresentationTimeUs -
                                out.outPresentationTimeUs;
                        MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                        bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                                out.outPresentationTimeUs, out.flags);
                        bufferInfos.get(i).add(bufferInfoCopy);
                    }
                }
            }

            // If no codec was ready to accept input or produce output -
            // wait for a buffer-ready callback.
            bufferConsumedTotal = false;
            for (boolean bufferConsumedCurrent : bufferConsumed) {
                bufferConsumedTotal |= bufferConsumedCurrent;
            }
            if (!bufferConsumedTotal) {
                // Pick an encoder to wait for.
                for (int i = 0; i < numEncoders; i++) {
                    if (!bufferConsumed[i] && !sawOutputEOS[i]) {
                        codec[i].waitForBufferEvent();
                        break;
                    }
                }
            }

            // Check if EOS happened for all encoders.
            sawOutputEOSTotal = true;
            for (boolean sawOutputEOSStream : sawOutputEOS) {
                sawOutputEOSTotal &= sawOutputEOSStream;
            }
        }

        for (int i = 0; i < numEncoders; i++) {
            codec[i].deleteCodec();
            ivf[i].close();
            yuvStream[i].close();
            if (yuvScaled[i] != null) {
                yuvScaled[i].close();
            }
        }

        return bufferInfos;
    }

    /**
     * Some encoding statistics.
     */
    protected class VpxEncodingStatistics {
        VpxEncodingStatistics() {
            mBitrates = new ArrayList<Integer>();
            mFrames = new ArrayList<Integer>();
            mKeyFrames = new ArrayList<Integer>();
            mMinimumKeyFrameInterval = Integer.MAX_VALUE;
        }

        public ArrayList<Integer> mBitrates; // Bitrate values for each second of the stream.
        public ArrayList<Integer> mFrames; // Number of frames in each second of the stream.
        public int mAverageBitrate; // Average stream bitrate.
        public ArrayList<Integer> mKeyFrames; // Positions of key frames in the stream.
        public int mAverageKeyFrameInterval; // Average key frame interval.
        public int mMaximumKeyFrameInterval; // Maximum key frame interval.
        public int mMinimumKeyFrameInterval; // Minimum key frame interval.
    }
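
    // Illustrative sketch (hypothetical, not part of the test flow): a caller would
    // typically feed the BufferInfo lists returned by the encode loops into
    // computeEncodingStatistics() below and compare the result against the target
    // settings, along the lines of:
    //
    //     VpxEncodingStatistics stat = computeEncodingStatistics(bufferInfos);
    //     // e.g. require the measured average to be within 10% of the requested bitrate
    //     assertTrue(Math.abs(stat.mAverageBitrate - targetBitrate) <= targetBitrate * 0.1);
    //
    // The targetBitrate variable and the 10% tolerance are assumptions for
    // illustration only.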

    /**
     * Calculates average bitrate and key frame interval for the encoded streams.
     * The output mBitrates field will contain a bitrate value for every second
     * of the encoded stream.
     * The average stream bitrate will be stored in the mAverageBitrate field.
     * The mKeyFrames array will contain the positions of key frames in the encoded
     * stream, and mAverageKeyFrameInterval - the average key frame interval.
     */
    protected VpxEncodingStatistics computeEncodingStatistics(int encoderId,
            ArrayList<MediaCodec.BufferInfo> bufferInfos) {
        VpxEncodingStatistics statistics = new VpxEncodingStatistics();

        int totalSize = 0;
        int frames = 0;
        int framesPerSecond = 0;
        int totalFrameSizePerSecond = 0;
        int maxFrameSize = 0;
        int currentSecond;
        int nextSecond = 0;
        String keyFrameList = " IFrame List: ";
        String bitrateList = " Bitrate list: ";
        String framesList = " FPS list: ";

        for (int j = 0; j < bufferInfos.size(); j++) {
            MediaCodec.BufferInfo info = bufferInfos.get(j);
            currentSecond = (int)(info.presentationTimeUs / 1000000);
            boolean lastFrame = (j == bufferInfos.size() - 1);
            if (!lastFrame) {
                nextSecond = (int)(bufferInfos.get(j + 1).presentationTimeUs / 1000000);
            }

            totalSize += info.size;
            totalFrameSizePerSecond += info.size;
            maxFrameSize = Math.max(maxFrameSize, info.size);
            framesPerSecond++;
            frames++;

            // Update the bitrate statistics if the next frame will
            // be for the next second.
            if (lastFrame || nextSecond > currentSecond) {
                int currentBitrate = totalFrameSizePerSecond * 8;
                bitrateList += (currentBitrate + " ");
                framesList += (framesPerSecond + " ");
                statistics.mBitrates.add(currentBitrate);
                statistics.mFrames.add(framesPerSecond);
                totalFrameSizePerSecond = 0;
                framesPerSecond = 0;
            }

            // Update key frame statistics.
            if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
                statistics.mKeyFrames.add(j);
                keyFrameList += (j + " ");
            }
        }
        // Compute the stream duration in seconds (rounded to the nearest second),
        // then the average bitrate in bits per second.
        int duration = (int)(bufferInfos.get(bufferInfos.size() - 1).presentationTimeUs / 1000);
        duration = (duration + 500) / 1000;
        statistics.mAverageBitrate = (int)(((long)totalSize * 8) / duration);
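        // For example, 1250000 bytes of encoded output over a 10 second stream gives
        // 1250000 * 8 / 10 = 1000000 bps (numbers chosen for illustration only).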
        Log.d(TAG, "Statistics for encoder # " + encoderId);
        // Calculate the average key frame interval in frames.
        int keyFrames = statistics.mKeyFrames.size();
        if (keyFrames > 1) {
            statistics.mAverageKeyFrameInterval =
                    statistics.mKeyFrames.get(keyFrames - 1) - statistics.mKeyFrames.get(0);
            statistics.mAverageKeyFrameInterval =
                    Math.round((float)statistics.mAverageKeyFrameInterval / (keyFrames - 1));
            for (int j = 1; j < keyFrames; j++) {
                int keyFrameInterval =
                        statistics.mKeyFrames.get(j) - statistics.mKeyFrames.get(j - 1);
                statistics.mMaximumKeyFrameInterval =
                        Math.max(statistics.mMaximumKeyFrameInterval, keyFrameInterval);
                statistics.mMinimumKeyFrameInterval =
                        Math.min(statistics.mMinimumKeyFrameInterval, keyFrameInterval);
            }
            Log.d(TAG, "  Key frame intervals: Max: " + statistics.mMaximumKeyFrameInterval +
                    ". Min: " + statistics.mMinimumKeyFrameInterval +
                    ". Avg: " + statistics.mAverageKeyFrameInterval);
        }
        Log.d(TAG, "  Frames: " + frames + ". Duration: " + duration +
                ". Total size: " + totalSize + ". Key frames: " + keyFrames);
        Log.d(TAG, keyFrameList);
        Log.d(TAG, bitrateList);
        Log.d(TAG, framesList);
        Log.d(TAG, "  Bitrate average: " + statistics.mAverageBitrate);
        Log.d(TAG, "  Maximum frame size: " + maxFrameSize);

        return statistics;
    }

    protected VpxEncodingStatistics computeEncodingStatistics(
            ArrayList<MediaCodec.BufferInfo> bufferInfos) {
        return computeEncodingStatistics(0, bufferInfos);
    }

    protected ArrayList<VpxEncodingStatistics> computeSimulcastEncodingStatistics(
            ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos) {
        int numCodecs = bufferInfos.size();
        ArrayList<VpxEncodingStatistics> statistics = new ArrayList<VpxEncodingStatistics>();

        for (int i = 0; i < numCodecs; i++) {
            VpxEncodingStatistics currentStatistics =
                    computeEncodingStatistics(i, bufferInfos.get(i));
            statistics.add(currentStatistics);
        }
        return statistics;
    }

    /**
     * Calculates the maximum latency for the encoder/decoder based on the buffer info
     * array generated either by the encoder or the decoder. The encode loop stores the
     * input/output presentation time delta in the offset field of each buffer info.
     */
    protected int maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos) {
        int maxValue = 0;
        for (MediaCodec.BufferInfo bufferInfo : bufferInfos) {
            maxValue = Math.max(maxValue, bufferInfo.offset);
        }
        maxValue = (maxValue + 500) / 1000; // us -> ms, rounded to nearest
        return maxValue;
    }

    /**
     * Decoding PSNR statistics.
     */
    protected class VpxDecodingStatistics {
        VpxDecodingStatistics() {
            mMinimumPSNR = Integer.MAX_VALUE;
        }
        public double mAveragePSNR;
        public double mMinimumPSNR;
    }

    /**
     * Calculates the PSNR value between two video frames.
     */
    private double computePSNR(byte[] data0, byte[] data1) {
        long squareError = 0;
        assertTrue(data0.length == data1.length);
        int length = data0.length;
        for (int i = 0; i < length; i++) {
            int diff = ((int)data0[i] & 0xff) - ((int)data1[i] & 0xff);
            squareError += diff * diff;
        }
        double meanSquareError = (double)squareError / length;
        double psnr = 10 * Math.log10((double)255 * 255 / meanSquareError);
        return psnr;
    }
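
    // Worked example for the formula above: PSNR = 10 * log10(255^2 / MSE).
    // If every pixel differs by exactly 1, MSE = 1 and
    // PSNR = 10 * log10(65025) ~= 48.1 dB. Identical planes give MSE = 0 and an
    // infinite PSNR, so finite values should be expected only for lossy streams.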

    /**
     * Calculates average and minimum PSNR values between a
     * set of reference and decoded video frames.
     * Runs the PSNR calculation for the full duration of the decoded data.
     */
    protected VpxDecodingStatistics computeDecodingStatistics(
            String referenceYuvFilename,
            int referenceYuvRawId,
            String decodedYuvFilename,
            int width,
            int height) throws Exception {
        VpxDecodingStatistics statistics = new VpxDecodingStatistics();
        InputStream referenceStream =
                OpenFileOrResourceId(referenceYuvFilename, referenceYuvRawId);
        InputStream decodedStream = new FileInputStream(decodedYuvFilename);

        int ySize = width * height;
        int uvSize = width * height / 4;
        byte[] yRef = new byte[ySize];
        byte[] yDec = new byte[ySize];
        byte[] uvRef = new byte[uvSize];
        byte[] uvDec = new byte[uvSize];

        int frames = 0;
        double averageYPSNR = 0;
        double averageUPSNR = 0;
        double averageVPSNR = 0;
        double minimumYPSNR = Integer.MAX_VALUE;
        double minimumUPSNR = Integer.MAX_VALUE;
        double minimumVPSNR = Integer.MAX_VALUE;
        int minimumPSNRFrameIndex = 0;

        while (true) {
            // Calculate Y PSNR.
            int bytesReadRef = referenceStream.read(yRef);
            int bytesReadDec = decodedStream.read(yDec);
            if (bytesReadDec == -1) {
                break;
            }
            if (bytesReadRef == -1) {
                // The reference file wrapped around - reopen it and read from the start.
                referenceStream.close();
                referenceStream =
                        OpenFileOrResourceId(referenceYuvFilename, referenceYuvRawId);
                bytesReadRef = referenceStream.read(yRef);
            }
            double curYPSNR = computePSNR(yRef, yDec);
            averageYPSNR += curYPSNR;
            minimumYPSNR = Math.min(minimumYPSNR, curYPSNR);
            double curMinimumPSNR = curYPSNR;

            // Calculate U PSNR.
            bytesReadRef = referenceStream.read(uvRef);
            bytesReadDec = decodedStream.read(uvDec);
            double curUPSNR = computePSNR(uvRef, uvDec);
            averageUPSNR += curUPSNR;
            minimumUPSNR = Math.min(minimumUPSNR, curUPSNR);
            curMinimumPSNR = Math.min(curMinimumPSNR, curUPSNR);

            // Calculate V PSNR.
            bytesReadRef = referenceStream.read(uvRef);
            bytesReadDec = decodedStream.read(uvDec);
            double curVPSNR = computePSNR(uvRef, uvDec);
            averageVPSNR += curVPSNR;
            minimumVPSNR = Math.min(minimumVPSNR, curVPSNR);
            curMinimumPSNR = Math.min(curMinimumPSNR, curVPSNR);

            // Track the frame index for the minimum PSNR value - helps to detect
            // possible distortions.
            if (curMinimumPSNR < statistics.mMinimumPSNR) {
                statistics.mMinimumPSNR = curMinimumPSNR;
                minimumPSNRFrameIndex = frames;
            }

            String logStr = String.format(Locale.US, "PSNR #%d: Y: %.2f. U: %.2f. V: %.2f",
                    frames, curYPSNR, curUPSNR, curVPSNR);
            Log.v(TAG, logStr);

            frames++;
        }

        averageYPSNR /= frames;
        averageUPSNR /= frames;
        averageVPSNR /= frames;
        // Weight luma four times more than each chroma plane, matching the 4:2:0
        // sample count ratio.
        statistics.mAveragePSNR = (4 * averageYPSNR + averageUPSNR + averageVPSNR) / 6;

        Log.d(TAG, "PSNR statistics for " + frames + " frames.");
        String logStr = String.format(Locale.US,
                "Average PSNR: Y: %.1f. U: %.1f. V: %.1f. Average: %.1f",
                averageYPSNR, averageUPSNR, averageVPSNR, statistics.mAveragePSNR);
        Log.d(TAG, logStr);
        logStr = String.format(Locale.US,
                "Minimum PSNR: Y: %.1f. U: %.1f. V: %.1f. Overall: %.1f at frame %d",
                minimumYPSNR, minimumUPSNR, minimumVPSNR,
                statistics.mMinimumPSNR, minimumPSNRFrameIndex);
        Log.d(TAG, logStr);

        referenceStream.close();
        decodedStream.close();
        return statistics;
    }
}