/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.media.cts.R;

import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.AssetFileDescriptor;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.platform.test.annotations.AppModeFull;
import android.util.Log;
import android.view.Surface;

import com.android.compatibility.common.util.MediaUtils;

import android.opengl.GLES20;
import javax.microedition.khronos.opengles.GL10;

import java.io.IOException;
import java.lang.System;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Vector;
import java.util.zip.CRC32;

@AppModeFull
public class AdaptivePlaybackTest extends MediaPlayerTestBase {
    private static final String TAG = "AdaptivePlaybackTest";
    private boolean sanity = false;
    private static final int MIN_FRAMES_BEFORE_DRC = 2;

    public Iterable<Codec> H264(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                MediaFormat.MIMETYPE_VIDEO_AVC,
                R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                R.raw.video_1280x720_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                R.raw.bbb_s1_720x480_mp4_h264_mp3_2mbps_30fps_aac_lc_5ch_320kbps_48000hz);
    }

    public Iterable<Codec> HEVC(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                MediaFormat.MIMETYPE_VIDEO_HEVC,
                R.raw.bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz,
                R.raw.bbb_s4_1280x720_mp4_hevc_mp31_4mbps_30fps_aac_he_stereo_80kbps_32000hz,
                R.raw.bbb_s1_352x288_mp4_hevc_mp2_600kbps_30fps_aac_he_stereo_96kbps_48000hz);
    }

    public Iterable<Codec> H263(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                MediaFormat.MIMETYPE_VIDEO_H263,
                R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz,
                R.raw.video_352x288_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz);
    }

    public Iterable<Codec> Mpeg4(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                MediaFormat.MIMETYPE_VIDEO_MPEG4,
                R.raw.video_1280x720_mp4_mpeg4_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz,
                R.raw.video_176x144_mp4_mpeg4_300kbps_25fps_aac_stereo_128kbps_44100hz);
    }

    public Iterable<Codec> VP8(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                MediaFormat.MIMETYPE_VIDEO_VP8,
                R.raw.video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz,
                R.raw.bbb_s3_1280x720_webm_vp8_8mbps_60fps_opus_6ch_384kbps_48000hz,
                R.raw.bbb_s1_320x180_webm_vp8_800kbps_30fps_opus_5ch_320kbps_48000hz);
    }

    public Iterable<Codec> VP9(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                MediaFormat.MIMETYPE_VIDEO_VP9,
                R.raw.video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz,
                R.raw.bbb_s4_1280x720_webm_vp9_0p31_4mbps_30fps_opus_stereo_128kbps_48000hz,
                R.raw.bbb_s1_320x180_webm_vp9_0p11_600kbps_30fps_vorbis_mono_64kbps_48000hz);
    }

    CodecFactory ALL = new CodecFactory();
    CodecFactory SW = new SWCodecFactory();
    CodecFactory HW = new HWCodecFactory();

    public Iterable<Codec> H264() { return H264(ALL); }
    public Iterable<Codec> HEVC() { return HEVC(ALL); }
    public Iterable<Codec> VP8() { return VP8(ALL); }
    public Iterable<Codec> VP9() { return VP9(ALL); }
    public Iterable<Codec> Mpeg4() { return Mpeg4(ALL); }
    public Iterable<Codec> H263() { return H263(ALL); }

    public Iterable<Codec> AllCodecs() {
        return chain(H264(ALL), HEVC(ALL), VP8(ALL), VP9(ALL), Mpeg4(ALL), H263(ALL));
    }

    public Iterable<Codec> SWCodecs() {
        return chain(H264(SW), HEVC(SW), VP8(SW), VP9(SW), Mpeg4(SW), H263(SW));
    }

    public Iterable<Codec> HWCodecs() {
        return chain(H264(HW), HEVC(HW), VP8(HW), VP9(HW), Mpeg4(HW), H263(HW));
    }

    /* tests for adaptive codecs */
    Test adaptiveEarlyEos = new EarlyEosTest().adaptive();
    Test adaptiveEosFlushSeek = new EosFlushSeekTest().adaptive();
    Test adaptiveSkipAhead = new AdaptiveSkipTest(true /* forward */);
    Test adaptiveSkipBack = new AdaptiveSkipTest(false /* forward */);

    /* DRC tests for adaptive codecs */
    Test adaptiveReconfigDrc = new ReconfigDrcTest().adaptive();
    Test adaptiveSmallReconfigDrc = new ReconfigDrcTest().adaptiveSmall();
    Test adaptiveDrc = new AdaptiveDrcTest(); /* adaptive */
    Test adaptiveSmallDrc = new AdaptiveDrcTest().adaptiveSmall();

    /* tests for regular codecs */
    Test earlyEos = new EarlyEosTest();
    Test eosFlushSeek = new EosFlushSeekTest();
    Test flushConfigureDrc = new ReconfigDrcTest();

    Test[] allTests = {
        adaptiveEarlyEos,
        adaptiveEosFlushSeek,
        adaptiveSkipAhead,
        adaptiveSkipBack,
        adaptiveReconfigDrc,
        adaptiveSmallReconfigDrc,
        adaptiveDrc,
        adaptiveSmallDrc,
        earlyEos,
        eosFlushSeek,
        flushConfigureDrc,
    };

    /* helpers to run sets of tests */
    public void runEOS() { ex(AllCodecs(), new Test[] {
        adaptiveEarlyEos,
        adaptiveEosFlushSeek,
        adaptiveReconfigDrc,
        adaptiveSmallReconfigDrc,
        earlyEos,
        eosFlushSeek,
        flushConfigureDrc,
    }); }

    public void runAll() { ex(AllCodecs(), allTests); }
    public void runSW() { ex(SWCodecs(), allTests); }
    public void runHW() { ex(HWCodecs(), allTests); }

    public void sanityAll() { sanity = true; try { runAll(); } finally { sanity = false; } }
    public void sanitySW() { sanity = true; try { runSW(); } finally { sanity = false; } }
    public void sanityHW() { sanity = true; try { runHW(); } finally { sanity = false; } }

    public void runH264() { ex(H264(), allTests); }
    public void runHEVC() { ex(HEVC(), allTests); }
    public void runVP8() { ex(VP8(), allTests); }
    public void runVP9() { ex(VP9(), allTests); }
    public void runMpeg4() { ex(Mpeg4(), allTests); }
    public void runH263() { ex(H263(), allTests); }
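
    /* variants restricted to a single decoder implementation class: hardware (non-Google)
       or software (Google) decoders only, as selected by the HW/SW factories */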
    public void onlyH264HW() { ex(H264(HW), allTests); }
    public void onlyHEVCHW() { ex(HEVC(HW), allTests); }
    public void onlyVP8HW() { ex(VP8(HW), allTests); }
    public void onlyVP9HW() { ex(VP9(HW), allTests); }
    public void onlyMpeg4HW() { ex(Mpeg4(HW), allTests); }
    public void onlyH263HW() { ex(H263(HW), allTests); }

    public void onlyH264SW() { ex(H264(SW), allTests); }
    public void onlyHEVCSW() { ex(HEVC(SW), allTests); }
    public void onlyVP8SW() { ex(VP8(SW), allTests); }
    public void onlyVP9SW() { ex(VP9(SW), allTests); }
    public void onlyMpeg4SW() { ex(Mpeg4(SW), allTests); }
    public void onlyH263SW() { ex(H263(SW), allTests); }

    public void bytebuffer() { ex(H264(SW), new EarlyEosTest().byteBuffer()); }
    public void onlyTexture() { ex(H264(HW), new EarlyEosTest().texture()); }

    /* individual tests */
    public void testH264_adaptiveEarlyEos() { ex(H264(), adaptiveEarlyEos); }
    public void testHEVC_adaptiveEarlyEos() { ex(HEVC(), adaptiveEarlyEos); }
    public void testVP8_adaptiveEarlyEos() { ex(VP8(), adaptiveEarlyEos); }
    public void testVP9_adaptiveEarlyEos() { ex(VP9(), adaptiveEarlyEos); }
    public void testMpeg4_adaptiveEarlyEos() { ex(Mpeg4(), adaptiveEarlyEos); }
    public void testH263_adaptiveEarlyEos() { ex(H263(), adaptiveEarlyEos); }

    public void testH264_adaptiveEosFlushSeek() { ex(H264(), adaptiveEosFlushSeek); }
    public void testHEVC_adaptiveEosFlushSeek() { ex(HEVC(), adaptiveEosFlushSeek); }
    public void testVP8_adaptiveEosFlushSeek() { ex(VP8(), adaptiveEosFlushSeek); }
    public void testVP9_adaptiveEosFlushSeek() { ex(VP9(), adaptiveEosFlushSeek); }
    public void testMpeg4_adaptiveEosFlushSeek() { ex(Mpeg4(), adaptiveEosFlushSeek); }
    public void testH263_adaptiveEosFlushSeek() { ex(H263(), adaptiveEosFlushSeek); }

    public void testH264_adaptiveSkipAhead() { ex(H264(), adaptiveSkipAhead); }
    public void testHEVC_adaptiveSkipAhead() { ex(HEVC(), adaptiveSkipAhead); }
    public void testVP8_adaptiveSkipAhead() { ex(VP8(), adaptiveSkipAhead); }
    public void testVP9_adaptiveSkipAhead() { ex(VP9(), adaptiveSkipAhead); }
    public void testMpeg4_adaptiveSkipAhead() { ex(Mpeg4(), adaptiveSkipAhead); }
    public void testH263_adaptiveSkipAhead() { ex(H263(), adaptiveSkipAhead); }

    public void testH264_adaptiveSkipBack() { ex(H264(), adaptiveSkipBack); }
    public void testHEVC_adaptiveSkipBack() { ex(HEVC(), adaptiveSkipBack); }
    public void testVP8_adaptiveSkipBack() { ex(VP8(), adaptiveSkipBack); }
    public void testVP9_adaptiveSkipBack() { ex(VP9(), adaptiveSkipBack); }
    public void testMpeg4_adaptiveSkipBack() { ex(Mpeg4(), adaptiveSkipBack); }
    public void testH263_adaptiveSkipBack() { ex(H263(), adaptiveSkipBack); }

    public void testH264_adaptiveReconfigDrc() { ex(H264(), adaptiveReconfigDrc); }
    public void testHEVC_adaptiveReconfigDrc() { ex(HEVC(), adaptiveReconfigDrc); }
    public void testVP8_adaptiveReconfigDrc() { ex(VP8(), adaptiveReconfigDrc); }
    public void testVP9_adaptiveReconfigDrc() { ex(VP9(), adaptiveReconfigDrc); }
    public void testMpeg4_adaptiveReconfigDrc() { ex(Mpeg4(), adaptiveReconfigDrc); }
    public void testH263_adaptiveReconfigDrc() { ex(H263(), adaptiveReconfigDrc); }

    public void testH264_adaptiveSmallReconfigDrc() { ex(H264(), adaptiveSmallReconfigDrc); }
    public void testHEVC_adaptiveSmallReconfigDrc() { ex(HEVC(), adaptiveSmallReconfigDrc); }
    public void testVP8_adaptiveSmallReconfigDrc() { ex(VP8(), adaptiveSmallReconfigDrc); }
    public void testVP9_adaptiveSmallReconfigDrc() { ex(VP9(), adaptiveSmallReconfigDrc); }
    public void testMpeg4_adaptiveSmallReconfigDrc() { ex(Mpeg4(), adaptiveSmallReconfigDrc); }
    public void testH263_adaptiveSmallReconfigDrc() { ex(H263(), adaptiveSmallReconfigDrc); }

    public void testH264_adaptiveDrc() { ex(H264(), adaptiveDrc); }
    public void testHEVC_adaptiveDrc() { ex(HEVC(), adaptiveDrc); }
    public void testVP8_adaptiveDrc() { ex(VP8(), adaptiveDrc); }
    public void testVP9_adaptiveDrc() { ex(VP9(), adaptiveDrc); }
    public void testMpeg4_adaptiveDrc() { ex(Mpeg4(), adaptiveDrc); }
    public void testH263_adaptiveDrc() { ex(H263(), adaptiveDrc); }

    public void testH264_adaptiveDrcEarlyEos() { ex(H264(), new AdaptiveDrcEarlyEosTest()); }
    public void testHEVC_adaptiveDrcEarlyEos() { ex(HEVC(), new AdaptiveDrcEarlyEosTest()); }
    public void testVP8_adaptiveDrcEarlyEos() { ex(VP8(), new AdaptiveDrcEarlyEosTest()); }
    public void testVP9_adaptiveDrcEarlyEos() { ex(VP9(), new AdaptiveDrcEarlyEosTest()); }

    public void testH264_adaptiveSmallDrc() { ex(H264(), adaptiveSmallDrc); }
    public void testHEVC_adaptiveSmallDrc() { ex(HEVC(), adaptiveSmallDrc); }
    public void testVP8_adaptiveSmallDrc() { ex(VP8(), adaptiveSmallDrc); }
    public void testVP9_adaptiveSmallDrc() { ex(VP9(), adaptiveSmallDrc); }

    public void testH264_earlyEos() { ex(H264(), earlyEos); }
    public void testHEVC_earlyEos() { ex(HEVC(), earlyEos); }
    public void testVP8_earlyEos() { ex(VP8(), earlyEos); }
    public void testVP9_earlyEos() { ex(VP9(), earlyEos); }
    public void testMpeg4_earlyEos() { ex(Mpeg4(), earlyEos); }
    public void testH263_earlyEos() { ex(H263(), earlyEos); }

    public void testH264_eosFlushSeek() { ex(H264(), eosFlushSeek); }
    public void testHEVC_eosFlushSeek() { ex(HEVC(), eosFlushSeek); }
    public void testVP8_eosFlushSeek() { ex(VP8(), eosFlushSeek); }
    public void testVP9_eosFlushSeek() { ex(VP9(), eosFlushSeek); }
    public void testMpeg4_eosFlushSeek() { ex(Mpeg4(), eosFlushSeek); }
    public void testH263_eosFlushSeek() { ex(H263(), eosFlushSeek); }

    public void testH264_flushConfigureDrc() { ex(H264(), flushConfigureDrc); }
    public void testHEVC_flushConfigureDrc() { ex(HEVC(), flushConfigureDrc); }
    public void testVP8_flushConfigureDrc() { ex(VP8(), flushConfigureDrc); }
    public void testVP9_flushConfigureDrc() { ex(VP9(), flushConfigureDrc); }
    public void testMpeg4_flushConfigureDrc() { ex(Mpeg4(), flushConfigureDrc); }
    public void testH263_flushConfigureDrc() { ex(H263(), flushConfigureDrc); }

    /* only use unchecked exceptions to allow brief test methods */
    private void ex(Iterable<Codec> codecList, Test test) {
        ex(codecList, new Test[] { test });
    }

    private void ex(Iterable<Codec> codecList, Test[] testList) {
        if (codecList == null) {
            Log.i(TAG, "CodecList was empty. Skipping test.");
            return;
        }
        TestList tests = new TestList();
        for (Codec c : codecList) {
            for (Test test : testList) {
                if (test.isValid(c)) {
                    test.addTests(tests, c);
                }
            }
        }
        try {
            tests.run();
        } catch (Throwable t) {
            throw new RuntimeException(t);
        }
    }

    /* need an inner class to have access to the activity */
    abstract class ActivityTest extends Test {
        TestSurface mNullSurface = new ActivitySurface(null);
        protected TestSurface getSurface() {
            if (mUseSurface) {
                return new ActivitySurface(getActivity().getSurfaceHolder().getSurface());
            } else if (mUseSurfaceTexture) {
                return new DecoderSurface(1280, 720, mCRC);
            }
            return mNullSurface;
        }
    }

    static final int NUM_FRAMES = 50;

    /**
     * Queue some frames with an EOS on the last one. Test that we have decoded as many
     * frames as we queued. This tests the EOS handling of the codec to see if all queued
     * (and out-of-order) frames are actually decoded and returned.
     *
     * Also test flushing prior to sending CSD, and immediately after sending CSD.
     */
    class EarlyEosTest extends ActivityTest {
        // using bitfields to create a directed state graph that terminates at FLUSH_NEVER
        static final int FLUSH_BEFORE_CSD = (1 << 1);
        static final int FLUSH_AFTER_CSD = (1 << 0);
        static final int FLUSH_NEVER = 0;

        public boolean isValid(Codec c) {
            return getFormat(c) != null;
        }
        public void addTests(TestList tests, final Codec c) {
            int state = FLUSH_BEFORE_CSD;
            for (int i = NUM_FRAMES / 2; i > 0; --i, state >>= 1) {
                final int queuedFrames = i;
                final int earlyFlushMode = state;
                tests.add(
                    new Step("testing early EOS at " + queuedFrames, this, c) {
                        public void run() {
                            Decoder decoder = new Decoder(c.name);
                            try {
                                MediaFormat fmt = stepFormat();
                                MediaFormat configFmt = fmt;
                                if (earlyFlushMode == FLUSH_BEFORE_CSD) {
                                    // flush before CSD requires not submitting CSD with configure
                                    configFmt = Media.removeCSD(fmt);
                                }
                                decoder.configureAndStart(configFmt, stepSurface());
                                if (earlyFlushMode != FLUSH_NEVER) {
                                    decoder.flush();
                                    // We must always queue CSD after a flush that potentially
                                    // happens before we receive the output format change. This
                                    // should also work after the format change has been received.
                                    decoder.queueCSD(fmt);
                                }
                                int decodedFrames = -decoder.queueInputBufferRange(
                                        stepMedia(),
                                        0 /* startFrame */,
                                        queuedFrames,
                                        true /* sendEos */,
                                        true /* waitForEos */);
                                if (decodedFrames <= 0) {
                                    Log.w(TAG, "Did not receive EOS -- negating frame count");
                                }
                                decoder.stop();
                                if (decodedFrames != queuedFrames) {
                                    warn("decoded " + decodedFrames + " frames out of " +
                                            queuedFrames + " queued");
                                }
                            } finally {
                                warn(decoder.getWarnings());
                                decoder.releaseQuietly();
                            }
                        }
                    });
                if (sanity) {
                    i >>= 1;
                }
            }
        }
    };

    /**
     * Similar to EarlyEosTest, but we keep the component alive and running in between the steps.
     * This is how seeking should be done if all frames must be output. This also tests that
     * PTS can be repeated after flush.
     */
    class EosFlushSeekTest extends ActivityTest {
        Decoder mDecoder; // test state
        public boolean isValid(Codec c) {
            return getFormat(c) != null;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing EOS & flush before seek - init", this, c) {
                    public void run() {
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                    }});

            for (int i = NUM_FRAMES; i > 0; i--) {
                final int queuedFrames = i;
                tests.add(
                    new Step("testing EOS & flush before seeking after " + queuedFrames +
                            " frames", this, c) {
                        public void run() {
                            int decodedFrames = -mDecoder.queueInputBufferRange(
                                    stepMedia(),
                                    0 /* startFrame */,
                                    queuedFrames,
                                    true /* sendEos */,
                                    true /* waitForEos */);
                            if (decodedFrames != queuedFrames) {
                                warn("decoded " + decodedFrames + " frames out of " +
                                        queuedFrames + " queued");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();
                            mDecoder.flush();
                        }
                    });
                if (sanity) {
                    i >>= 1;
                }
            }

            tests.add(
                new Step("testing EOS & flush before seek - finally", this, c) {
                    public void run() {
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }});
        }
    };

    /**
     * Similar to EosFlushSeekTest, but we change the media size between the steps.
     * This is how dynamic resolution switching can be done on codecs that do not support
     * adaptive playback.
     */
    class ReconfigDrcTest extends ActivityTest {
        Decoder mDecoder; // test state
        public boolean isValid(Codec c) {
            return getFormat(c) != null && c.mediaList.length > 1;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing DRC with reconfigure - init", this, c) {
                    public void run() {
                        mDecoder = new Decoder(c.name);
                    }});

            for (int i = NUM_FRAMES, ix = 0; i > 0; i--, ix++) {
                final int queuedFrames = i;
                final int mediaIx = ix % c.mediaList.length;
                tests.add(
                    new Step("testing DRC with reconfigure after " + queuedFrames + " frames",
                            this, c, mediaIx) {
                        public void run() {
                            try {
                                mDecoder.configureAndStart(stepFormat(), stepSurface());
                                int decodedFrames = -mDecoder.queueInputBufferRange(
                                        stepMedia(),
                                        0 /* startFrame */,
                                        queuedFrames,
                                        true /* sendEos */,
                                        true /* waitForEos */);
                                if (decodedFrames != queuedFrames) {
                                    warn("decoded " + decodedFrames + " frames out of " +
                                            queuedFrames + " queued");
                                }
                                warn(mDecoder.getWarnings());
                                mDecoder.clearWarnings();
                                mDecoder.flush();
                            } finally {
                                mDecoder.stop();
                            }
                        }
                    });
                if (sanity) {
                    i >>= 1;
                }
            }
            tests.add(
                new Step("testing DRC with reconfigure - finally", this, c) {
                    public void run() {
                        mDecoder.release();
                    }});
        }
    };

    /* ADAPTIVE-ONLY TESTS - only run on codecs that support adaptive playback */

    /**
     * Test dynamic resolution change support. Queue various sized media segments
     * with different resolutions, verify that all queued frames were decoded. Here
     * PTS will grow between segments.
     */
    class AdaptiveDrcTest extends ActivityTest {
        Decoder mDecoder;
        int mAdjustTimeUs;
        int mDecodedFrames;
        int mQueuedFrames;

        public AdaptiveDrcTest() {
            super();
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive && c.mediaList.length > 1;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing DRC with no reconfigure - init", this, c) {
                    public void run() throws Throwable {
                        // FIXME wait 2 seconds to allow system to free up previous codecs
                        try {
                            Thread.sleep(2000);
                        } catch (InterruptedException e) {}
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                        mAdjustTimeUs = 0;
                        mDecodedFrames = 0;
                        mQueuedFrames = 0;
                    }});

            for (int i = NUM_FRAMES, ix = 0; i >= MIN_FRAMES_BEFORE_DRC; i--, ix++) {
                final int mediaIx = ix % c.mediaList.length;
                final int segmentSize = i;
                tests.add(
                    new Step("testing DRC with no reconfigure after " + i + " frames",
                            this, c, mediaIx) {
                        public void run() throws Throwable {
                            mQueuedFrames += segmentSize;
                            boolean lastSequence = segmentSize == MIN_FRAMES_BEFORE_DRC;
                            if (sanity) {
                                lastSequence = (segmentSize >> 1) <= MIN_FRAMES_BEFORE_DRC;
                            }
                            int frames = mDecoder.queueInputBufferRange(
                                    stepMedia(),
                                    0 /* startFrame */,
                                    segmentSize,
                                    lastSequence /* sendEos */,
                                    lastSequence /* expectEos */,
                                    mAdjustTimeUs);
                            if (lastSequence && frames >= 0) {
                                warn("did not receive EOS, received " + frames + " frames");
                            } else if (!lastSequence && frames < 0) {
                                warn("received EOS, received " + (-frames) + " frames");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();

                            mDecodedFrames += Math.abs(frames);
                            mAdjustTimeUs += 1 + stepMedia().getTimestampRangeValue(
                                    0, segmentSize, Media.RANGE_END);
                        }});
                if (sanity) {
                    i >>= 1;
                }
            }
            tests.add(
                new Step("testing DRC with no reconfigure - finally", this, c) {
                    public void run() throws Throwable {
                        if (mDecodedFrames != mQueuedFrames) {
                            warn("decoded " + mDecodedFrames + " frames out of " +
                                    mQueuedFrames + " queued");
                        }
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }
                });
        }
    };

    /**
     * Queue EOS shortly after a dynamic resolution change. Test that all frames were
     * decoded.
     */
    class AdaptiveDrcEarlyEosTest extends ActivityTest {
        public AdaptiveDrcEarlyEosTest() {
            super();
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive && c.mediaList.length > 1;
        }
        public Step testStep(final Codec c, final int framesBeforeDrc,
                final int framesBeforeEos) {
            return new Step("testing DRC with no reconfigure after " + framesBeforeDrc +
                    " frames and subsequent EOS after " + framesBeforeEos + " frames",
                    this, c) {
                public void run() throws Throwable {
                    Decoder decoder = new Decoder(c.name);
                    int queuedFrames = framesBeforeDrc + framesBeforeEos;
                    int framesA = 0;
                    int framesB = 0;
                    try {
                        decoder.configureAndStart(stepFormat(), stepSurface());
                        Media media = c.mediaList[0];

                        framesA = decoder.queueInputBufferRange(
                                media,
                                0 /* startFrame */,
                                framesBeforeDrc,
                                false /* sendEos */,
                                false /* expectEos */);
                        if (framesA < 0) {
                            warn("received unexpected EOS, received " + (-framesA) + " frames");
                        }
                        long adjustTimeUs = 1 + media.getTimestampRangeValue(
                                0, framesBeforeDrc, Media.RANGE_END);

                        media = c.mediaList[1];
                        framesB = decoder.queueInputBufferRange(
                                media,
                                0 /* startFrame */,
                                framesBeforeEos,
                                true /* sendEos */,
                                true /* expectEos */,
                                adjustTimeUs);
                        if (framesB >= 0) {
                            warn("did not receive EOS, received " + framesB + " frames");
                        }
                        decoder.stop();
                        warn(decoder.getWarnings());
                    } finally {
                        int decodedFrames = Math.abs(framesA) + Math.abs(framesB);
                        if (decodedFrames != queuedFrames) {
                            warn("decoded " + decodedFrames + " frames out of " + queuedFrames +
                                    " queued");
                        }
                        decoder.release();
                    }
                }
            };
        }
        public void addTests(TestList tests, Codec c) {
            for (int drcFrame = 6; drcFrame >= MIN_FRAMES_BEFORE_DRC; drcFrame--) {
                for (int eosFrame = 6; eosFrame >= 1; eosFrame--) {
                    tests.add(testStep(c, drcFrame, eosFrame));
                }
            }
        }
    };

    /**
     * Similar to AdaptiveDrcTest, but tests that PTS can change at adaptive boundaries both
     * forward and backward without the need to flush.
     */
    class AdaptiveSkipTest extends ActivityTest {
        boolean forward;
        public AdaptiveSkipTest(boolean fwd) {
            forward = fwd;
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive;
        }
        Decoder mDecoder;
        int mAdjustTimeUs = 0;
        int mDecodedFrames = 0;
        int mQueuedFrames = 0;
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing flushless skipping - init", this, c) {
                    public void run() throws Throwable {
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                        mAdjustTimeUs = 0;
                        mDecodedFrames = 0;
                        mQueuedFrames = 0;
                    }});

            for (int i = 2, ix = 0; i <= NUM_FRAMES; i++, ix++) {
                final int mediaIx = ix % c.mediaList.length;
                final int segmentSize = i;
                final boolean lastSequence;
                if (sanity) {
                    lastSequence = (segmentSize << 1) + 1 > NUM_FRAMES;
                } else {
                    lastSequence = segmentSize >= NUM_FRAMES;
                }
                tests.add(
                    new Step("testing flushless skipping " + (forward ? "forward" : "backward") +
                            " after " + i + " frames", this, c) {
                        public void run() throws Throwable {
                            int frames = mDecoder.queueInputBufferRange(
                                    stepMedia(),
                                    0 /* startFrame */,
                                    segmentSize,
                                    lastSequence /* sendEos */,
                                    lastSequence /* expectEos */,
                                    mAdjustTimeUs);
                            if (lastSequence && frames >= 0) {
                                warn("did not receive EOS, received " + frames + " frames");
                            } else if (!lastSequence && frames < 0) {
                                warn("received unexpected EOS, received " + (-frames) + " frames");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();

                            mQueuedFrames += segmentSize;
                            mDecodedFrames += Math.abs(frames);
                            if (forward) {
                                mAdjustTimeUs += 10000000 + stepMedia().getTimestampRangeValue(
                                        0, segmentSize, Media.RANGE_DURATION);
                            }
                        }});
                if (sanity) {
                    i <<= 1;
                }
            }

            tests.add(
                new Step("testing flushless skipping - finally", this, c) {
                    public void run() throws Throwable {
                        if (mDecodedFrames != mQueuedFrames) {
                            warn("decoded " + mDecodedFrames + " frames out of " + mQueuedFrames +
                                    " queued");
                        }
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }});
        }
    };

    /* computes a CRC-32 over the first {size} bytes of buf; handles both array-backed
       and direct buffers */
    static long checksum(ByteBuffer buf, int size, CRC32 crc) {
        assertTrue(size >= 0);
        assertTrue(size <= buf.capacity());
        crc.reset();
        if (buf.hasArray()) {
            crc.update(buf.array(), buf.arrayOffset(), size);
        } else {
            int pos = buf.position();
            buf.rewind();
            final int rdsize = Math.min(4096, size);
            byte bb[] = new byte[rdsize];
            int chk;
            for (int i = 0; i < size; i += chk) {
                chk = Math.min(rdsize, size - i);
                buf.get(bb, 0, chk);
                crc.update(bb, 0, chk);
            }
            buf.position(pos);
        }
        return crc.getValue();
    }

    CRC32 mCRC;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mCRC = new CRC32();
    }

    /* ====================================================================== */
    /*                          UTILITY FUNCTIONS                             */
    /* ====================================================================== */
    static String byteBufferToString(ByteBuffer buf, int start, int len) {
        int oldPosition = buf.position();
        buf.position(start);
        int strlen = 2; // {}
        boolean ellipsis = len < buf.limit();
        if (ellipsis) {
            strlen += 3; // ...
        } else {
            len = buf.limit();
        }
        strlen += 3 * len - (len > 0 ? 1 : 0); // XX,XX
        char[] res = new char[strlen];
        res[0] = '{';
        res[strlen - 1] = '}';
        if (ellipsis) {
            res[strlen - 2] = res[strlen - 3] = res[strlen - 4] = '.';
        }
        for (int i = 1; i < len; i++) {
            res[i * 3] = ',';
        }
        for (int i = 0; i < len; i++) {
            byte b = buf.get();
            int d = (b >> 4) & 15;
            res[i * 3 + 1] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
            d = (b & 15);
            res[i * 3 + 2] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
        }
        buf.position(oldPosition);
        return new String(res);
    }

    static <E> Iterable<E> chain(Iterable<E> ... iterables) {
        /* simple chainer using ArrayList */
        ArrayList<E> items = new ArrayList<E>();
        for (Iterable<E> it: iterables) {
            for (E el: it) {
                items.add(el);
            }
        }
        return items;
    }

    class Decoder implements MediaCodec.OnFrameRenderedListener {
        private final static String TAG = "AdaptiveDecoder";
        final long kTimeOutUs = 5000;
        final long kCSDTimeOutUs = 1000000;
        MediaCodec mCodec;
        ByteBuffer[] mInputBuffers;
        ByteBuffer[] mOutputBuffers;
        TestSurface mSurface;
        boolean mDoChecksum;
        boolean mQueuedEos;
        ArrayList<Long> mTimeStamps;
        ArrayList<String> mWarnings;
        Vector<Long> mRenderedTimeStamps; // using Vector as it is implicitly synchronized
        long mLastRenderNanoTime;
        int mFramesNotifiedRendered;

        public Decoder(String codecName) {
            MediaCodec codec = null;
            try {
                codec = MediaCodec.createByCodecName(codecName);
            } catch (Exception e) {
                throw new RuntimeException("couldn't create codec " + codecName, e);
            }
            Log.i(TAG, "using codec: " + codec.getName());
            mCodec = codec;
            mDoChecksum = false;
            mQueuedEos = false;
            mTimeStamps = new ArrayList<Long>();
            mWarnings = new ArrayList<String>();
            mRenderedTimeStamps = new Vector<Long>();
            mLastRenderNanoTime = System.nanoTime();
            mFramesNotifiedRendered = 0;

            codec.setOnFrameRenderedListener(this, null);
        }

        public void onFrameRendered(MediaCodec codec, long presentationTimeUs, long nanoTime) {
            final long NSECS_IN_1SEC = 1000000000;
            if (!mRenderedTimeStamps.remove(presentationTimeUs)) {
                warn("invalid timestamp " + presentationTimeUs + ", queued " +
                        mRenderedTimeStamps);
            }
            assert nanoTime > mLastRenderNanoTime;
            mLastRenderNanoTime = nanoTime;
            ++mFramesNotifiedRendered;
            assert nanoTime > System.nanoTime() - NSECS_IN_1SEC;
        }

        public String getName() {
            return mCodec.getName();
        }

        public Iterable<String> getWarnings() {
            return mWarnings;
        }

        private void warn(String warning) {
            mWarnings.add(warning);
            Log.w(TAG, warning);
        }

        public void clearWarnings() {
            mWarnings.clear();
        }

        public void configureAndStart(MediaFormat format, TestSurface surface) {
            mSurface = surface;
            Log.i(TAG, "configure(" + format + ", " + mSurface.getSurface() + ")");
            mCodec.configure(format, mSurface.getSurface(), null /* crypto */, 0 /* flags */);
            Log.i(TAG, "start");
            mCodec.start();

            // inject some minimal setOutputSurface test
            // TODO: change this test to also change the surface midstream
            try {
                mCodec.setOutputSurface(null);
                fail("should not be able to set surface to NULL");
            } catch (IllegalArgumentException e) {}
            mCodec.setOutputSurface(mSurface.getSurface());

            mInputBuffers = mCodec.getInputBuffers();
            mOutputBuffers = mCodec.getOutputBuffers();
            Log.i(TAG, "configured " + mInputBuffers.length + " input[" +
                    mInputBuffers[0].capacity() + "] and " +
                    mOutputBuffers.length + " output[" +
                    (mOutputBuffers[0] == null ? null : mOutputBuffers[0].capacity()) + "]");
            mQueuedEos = false;
            mRenderedTimeStamps.clear();
            mLastRenderNanoTime = System.nanoTime();
            mFramesNotifiedRendered = 0;
        }

        public void stop() {
            Log.i(TAG, "stop");
            mCodec.stop();
            // if we have queued 32 frames or more, at least one should have been notified
            // to have rendered.
            if (mRenderedTimeStamps.size() > 32 && mFramesNotifiedRendered == 0) {
                fail("rendered " + mRenderedTimeStamps.size() +
                        " frames, but none have been notified.");
            }
        }

        public void flush() {
            Log.i(TAG, "flush");
            mCodec.flush();
            mQueuedEos = false;
            mTimeStamps.clear();
        }

        public String dequeueAndReleaseOutputBuffer(MediaCodec.BufferInfo info) {
            int ix = mCodec.dequeueOutputBuffer(info, kTimeOutUs);
            if (ix == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                mOutputBuffers = mCodec.getOutputBuffers();
                Log.d(TAG, "output buffers have changed.");
                return null;
            } else if (ix == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = mCodec.getOutputFormat();
                Log.d(TAG, "output format has changed to " + format);
                int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                mDoChecksum = isRecognizedFormat(colorFormat);
                return null;
            } else if (ix < 0) {
                Log.v(TAG, "no output");
                return null;
            }
            /* create checksum */
            long sum = 0;

            Log.v(TAG, "dequeue #" + ix + " => { [" + info.size + "] flags=" + info.flags +
                    " @" + info.presentationTimeUs + "}");

            // we get a nonzero size for valid decoded frames
            boolean doRender = (info.size != 0);
            if (mSurface.getSurface() == null) {
                if (mDoChecksum) {
                    sum = checksum(mOutputBuffers[ix], info.size, mCRC);
                }
                mCodec.releaseOutputBuffer(ix, doRender);
            } else if (doRender) {
                // If using SurfaceTexture, as soon as we call releaseOutputBuffer, the
                // buffer will be forwarded to SurfaceTexture to convert to a texture.
                // The API doesn't guarantee that the texture will be available before
                // the call returns, so we need to wait for the onFrameAvailable callback
                // to fire. If we don't wait, we risk dropping frames.
                mSurface.prepare();
                mCodec.releaseOutputBuffer(ix, doRender);
                mSurface.waitForDraw();
                if (mDoChecksum) {
                    sum = mSurface.checksum();
                }
            } else {
                mCodec.releaseOutputBuffer(ix, doRender);
            }

            if (doRender) {
                mRenderedTimeStamps.add(info.presentationTimeUs);
                if (!mTimeStamps.remove(info.presentationTimeUs)) {
                    warn("invalid timestamp " + info.presentationTimeUs + ", queued " +
                            mTimeStamps);
                }
            }

            return String.format(Locale.US, "{pts=%d, flags=%x, data=0x%x}",
                    info.presentationTimeUs, info.flags, sum);
        }

        /* returns true iff queued a frame */
        public boolean queueInputBuffer(Media media, int frameIx, boolean EOS) {
            return queueInputBuffer(media, frameIx, EOS, 0);
        }

        public boolean queueInputBuffer(Media media, int frameIx, boolean EOS, long adjustTimeUs) {
            if (mQueuedEos) {
                return false;
            }

            int ix = mCodec.dequeueInputBuffer(kTimeOutUs);

            if (ix < 0) {
                return false;
            }

            ByteBuffer buf = mInputBuffers[ix];
            Media.Frame frame = media.getFrame(frameIx);
            buf.clear();

            long presentationTimeUs = adjustTimeUs;
            int flags = 0;
            if (frame != null) {
                buf.put((ByteBuffer)frame.buf.clear());
                presentationTimeUs += frame.presentationTimeUs;
                flags = frame.flags;
            }

            if (EOS) {
                flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                mQueuedEos = true;
            }

            mTimeStamps.add(presentationTimeUs);
            Log.v(TAG, "queue { [" + buf.position() + "]=" + byteBufferToString(buf, 0, 16) +
                    " flags=" + flags + " @" + presentationTimeUs + "} => #" + ix);
            mCodec.queueInputBuffer(
                    ix, 0 /* offset */, buf.position(), presentationTimeUs, flags);
            return true;
        }

        /* returns the number of frames received, negated (multiplied by -1) if EOS was received */
        public int queueInputBufferRange(
                Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
                boolean waitForEos) {
            return queueInputBufferRange(
                    media, frameStartIx, frameEndIx, sendEosAtEnd, waitForEos, 0);
        }

        public void queueCSD(MediaFormat format) {
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            for (int csdIx = 0; ; ++csdIx) {
                ByteBuffer csdBuf = format.getByteBuffer("csd-" + csdIx);
                if (csdBuf == null) {
                    break;
                }

                int ix = mCodec.dequeueInputBuffer(kCSDTimeOutUs);
                if (ix < 0) {
                    fail("Could not dequeue input buffer for CSD #" + csdIx);
                    return;
                }

                ByteBuffer buf = mInputBuffers[ix];
                buf.clear();
                buf.put((ByteBuffer)csdBuf.clear());
                Log.v(TAG, "queue-CSD { [" + buf.position() + "]=" +
                        byteBufferToString(buf, 0, 16) + "} => #" + ix);
                mCodec.queueInputBuffer(
                        ix, 0 /* offset */, buf.position(), 0 /* timeUs */,
                        MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
            }
        }

        public int queueInputBufferRange(
                Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
                boolean waitForEos, long adjustTimeUs) {
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int frameIx = frameStartIx;
            int numFramesDecoded = 0;
            boolean sawOutputEos = false;
            int deadDecoderCounter = 0;
            ArrayList<String> frames = new ArrayList<String>();
            String buf = null;
            // After all input buffers are queued, dequeue as many output buffers as possible.
            while ((waitForEos && !sawOutputEos) || frameIx < frameEndIx || buf != null) {
                if (frameIx < frameEndIx) {
                    if (queueInputBuffer(
                            media,
                            frameIx,
                            sendEosAtEnd && (frameIx + 1 == frameEndIx),
                            adjustTimeUs)) {
                        frameIx++;
                    }
                }

                buf = dequeueAndReleaseOutputBuffer(info);
                if (buf != null) {
                    // Some decoders output a 0-sized buffer at the end. Disregard those.
                    if (info.size > 0) {
                        deadDecoderCounter = 0;
                        numFramesDecoded++;
                    }

                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.d(TAG, "saw output EOS.");
                        sawOutputEos = true;
                    }
                }
                if (++deadDecoderCounter >= 100) {
                    warn("have not received an output frame for a while");
                    break;
                }
            }

            if (numFramesDecoded < frameEndIx - frameStartIx - 16) {
                fail("Queued " + (frameEndIx - frameStartIx) + " frames but only received " +
                        numFramesDecoded);
            }
            return (sawOutputEos ? -1 : 1) * numFramesDecoded;
        }

        void release() {
            Log.i(TAG, "release");
            mCodec.release();
            mSurface.release();
            mInputBuffers = null;
            mOutputBuffers = null;
            mCodec = null;
            mSurface = null;
        }

        // don't fail on exceptions in release()
        void releaseQuietly() {
            try {
                Log.i(TAG, "release");
                mCodec.release();
            } catch (Throwable e) {
                Log.e(TAG, "Exception while releasing codec", e);
            }
            mSurface.release();
            mInputBuffers = null;
            mOutputBuffers = null;
            mCodec = null;
            mSurface = null;
        }
    };

    /* from EncodeDecodeTest */
    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            // these are the formats we know how to handle for this test
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    private int countFrames(
            String codecName, MediaCodecInfo codecInfo, Media media, int eosframe, TestSurface s)
            throws Exception {
        Decoder codec = new Decoder(codecName);
        codec.configureAndStart(media.getFormat(), s /* surface */);

        int numframes = codec.queueInputBufferRange(
                media, 0, eosframe, true /* sendEos */, true /* waitForEos */);
        if (numframes >= 0) {
            Log.w(TAG, "Did not receive EOS");
        } else {
            numframes *= -1;
        }

        codec.stop();
        codec.release();
        return numframes;
    }
}

/* ====================================================================== */
/*                          Video Media Asset                             */
/* ====================================================================== */
class Media {
    private final static String TAG = "AdaptiveMedia";
    private MediaFormat mFormat;
    private MediaFormat mAdaptiveFormat;
    static class Frame {
        long presentationTimeUs;
        int flags;
        ByteBuffer buf;
        public Frame(long _pts, int _flags, ByteBuffer _buf) {
            presentationTimeUs = _pts;
            flags = _flags;
            buf = _buf;
        }
    };
    private Frame[] mFrames;

    public Frame getFrame(int ix) {
        /* this works even on a short sample, as missing frames are left null */
        if (ix >= 0 && ix < mFrames.length) {
            return mFrames[ix];
        }
        return null;
    }
    private Media(MediaFormat format, MediaFormat adaptiveFormat, int numFrames) {
        /* need separate copies of format as once we add adaptive flags to
           MediaFormat, we cannot remove them */
        mFormat = format;
        mAdaptiveFormat = adaptiveFormat;
        mFrames = new Frame[numFrames];
    }

    public MediaFormat getFormat() {
        return mFormat;
    }

    public static MediaFormat removeCSD(MediaFormat orig) {
        MediaFormat copy = MediaFormat.createVideoFormat(
                orig.getString(orig.KEY_MIME),
                orig.getInteger(orig.KEY_WIDTH), orig.getInteger(orig.KEY_HEIGHT));
        for (String k : new String[] {
                orig.KEY_FRAME_RATE, orig.KEY_MAX_WIDTH, orig.KEY_MAX_HEIGHT,
                orig.KEY_MAX_INPUT_SIZE
        }) {
            if (orig.containsKey(k)) {
                try {
                    copy.setInteger(k, orig.getInteger(k));
                } catch (ClassCastException e) {
                    try {
                        copy.setFloat(k, orig.getFloat(k));
                    } catch (ClassCastException e2) {
                        // Could not copy value. Don't fail here, as having non-standard
                        // value types for defined keys is permissible by the media API
                        // for optional keys.
                    }
                }
            }
        }
        return copy;
    }

    /* KEY_MAX_WIDTH/KEY_MAX_HEIGHT advertise the largest resolution the codec must be
       prepared for; setting them is how adaptive playback is requested */
    public MediaFormat getAdaptiveFormat(int width, int height) {
        mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, width);
        mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, height);
        return mAdaptiveFormat;
    }

    public String getMime() {
        return mFormat.getString(MediaFormat.KEY_MIME);
    }

    public int getWidth() {
        return mFormat.getInteger(MediaFormat.KEY_WIDTH);
    }

    public int getHeight() {
        return mFormat.getInteger(MediaFormat.KEY_HEIGHT);
    }

    public final static int RANGE_START = 0;
    public final static int RANGE_END = 1;
    public final static int RANGE_DURATION = 2;

    public long getTimestampRangeValue(int frameStartIx, int frameEndIx, int kind) {
        long min = Long.MAX_VALUE, max = Long.MIN_VALUE;
        for (int frameIx = frameStartIx; frameIx < frameEndIx; frameIx++) {
            Frame frame = getFrame(frameIx);
            if (frame != null) {
                if (min > frame.presentationTimeUs) {
                    min = frame.presentationTimeUs;
                }
                if (max < frame.presentationTimeUs) {
                    max = frame.presentationTimeUs;
                }
            }
        }
        if (kind == RANGE_START) {
            return min;
        } else if (kind == RANGE_END) {
            return max;
        } else if (kind == RANGE_DURATION) {
            return max - min;
        } else {
            throw new IllegalArgumentException("kind is not valid: " + kind);
        }
    }

    public static Media read(Context context, int video, int numFrames)
            throws java.io.IOException {
        MediaExtractor extractor = new MediaExtractor();
        AssetFileDescriptor testFd = context.getResources().openRawResourceFd(video);
        extractor.setDataSource(testFd.getFileDescriptor(), testFd.getStartOffset(),
                testFd.getLength());

        Media media = new Media(
                extractor.getTrackFormat(0), extractor.getTrackFormat(0), numFrames);
        extractor.selectTrack(0);

        Log.i(TAG, "format=" + media.getFormat());
        ArrayList<ByteBuffer> csds = new ArrayList<ByteBuffer>();
        for (String tag: new String[] { "csd-0", "csd-1" }) {
            if (media.getFormat().containsKey(tag)) {
                ByteBuffer csd = media.getFormat().getByteBuffer(tag);
                Log.i(TAG, tag + "=" +
                        AdaptivePlaybackTest.byteBufferToString(csd, 0, 16));
                csds.add(csd);
            }
        }

        ByteBuffer readBuf = ByteBuffer.allocate(2000000);
        // CSD buffers are prepended to the first sample only; csds is cleared afterwards
        for (int ix = 0; ix < numFrames; ix++) {
            int sampleSize = extractor.readSampleData(readBuf, 0 /* offset */);

            if (sampleSize < 0) {
                throw new IllegalArgumentException("media is too short at " + ix + " frames");
            } else {
                readBuf.position(0).limit(sampleSize);
                for (ByteBuffer csd: csds) {
                    sampleSize += csd.capacity();
                }
                ByteBuffer buf = ByteBuffer.allocate(sampleSize);
                for (ByteBuffer csd: csds) {
                    csd.clear();
                    buf.put(csd);
                    csd.clear();
                    Log.i(TAG, "csd[" + csd.capacity() + "]");
                }
                Log.i(TAG, "frame-" + ix + "[" + sampleSize + "]");
                csds.clear();
                buf.put(readBuf);
                media.mFrames[ix] = new Frame(
                        extractor.getSampleTime(),
                        extractor.getSampleFlags(),
                        buf);
                extractor.advance();
            }
        }
        extractor.release();
        testFd.close();
        return media;
    }
}

/* ====================================================================== */
/*                  Codec, CodecList and CodecFactory                     */
/* ====================================================================== */
class Codec {
    private final static String TAG = "AdaptiveCodec";

    public String name;
    public CodecCapabilities capabilities;
    public Media[] mediaList;
    public boolean adaptive;
    public Codec(String n, CodecCapabilities c, Media[] m) {
        name = n;
        capabilities = c;
        List<Media> medias = new ArrayList<Media>();

        if (capabilities == null) {
            adaptive = false;
        } else {
            Log.w(TAG, "checking capabilities of " + name + " for " + m[0].getMime());
            adaptive = capabilities.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback);

            for (Media media : m) {
                if (media.getHeight() >= 720 &&
                        !capabilities.isFormatSupported(media.getFormat())) {
                    // skip if 720p and up is unsupported
                    Log.w(TAG, "codec " + name + " doesn't support " + media.getFormat());
                    continue;
                }
                medias.add(media);
            }
        }

        if (medias.size() < 2) {
            Log.e(TAG, "codec " + name + " doesn't support required resolutions");
        }
        mediaList = medias.subList(0, 2).toArray(new Media[2]);
    }
}

class CodecList extends ArrayList<Codec> { };

/* all decoders supporting the given mime type */
class CodecFamily extends CodecList {
    private final static String TAG = "AdaptiveCodecFamily";
    private static final int NUM_FRAMES = AdaptivePlaybackTest.NUM_FRAMES;

    public CodecFamily(Context context, String mime, int ... resources) {
        try {
            /* read all media */
            Media[] mediaList = new Media[resources.length];
            for (int i = 0; i < resources.length; i++) {
                Log.v(TAG, "reading media " + resources[i]);
                Media media = Media.read(context, resources[i], NUM_FRAMES);
                assert media.getMime().equals(mime):
                        "test stream " + resources[i] + " has " + media.getMime() +
                        " mime type instead of " + mime;

                /* assuming the first timestamp is the smallest */
                long firstPTS = media.getFrame(0).presentationTimeUs;
                long smallestPTS = media.getTimestampRangeValue(0, NUM_FRAMES, Media.RANGE_START);

                assert firstPTS == smallestPTS:
                        "first frame timestamp (" + firstPTS + ") is not smallest (" +
                        smallestPTS + ")";

                mediaList[i] = media;
            }

            /* enumerate codecs */
            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
                if (codecInfo.isEncoder()) {
                    continue;
                }
                for (String type : codecInfo.getSupportedTypes()) {
                    if (type.equals(mime)) {
                        add(new Codec(
                                codecInfo.getName(),
                                codecInfo.getCapabilitiesForType(mime),
                                mediaList));
                        break;
                    }
                }
            }
        } catch (Throwable t) {
            Log.wtf(TAG, "constructor failed", t);
            throw new RuntimeException("constructor failed", t);
        }
    }
}

/* only the Google (software) or only the non-Google (hardware) decoders for a mime type */
class CodecFamilySpecific extends CodecList {
    public CodecFamilySpecific(
            Context context, String mime, boolean isGoogle, int ... resources) {
        for (Codec c: new CodecFamily(context, mime, resources)) {
            if (MediaUtils.isGoogle(c.name) == isGoogle) {
                add(c);
            }
        }
    }
}

class CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, int ...resources) {
        return new CodecFamily(context, mime, resources);
    }
}

class SWCodecFactory extends CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, int ...resources) {
        return new CodecFamilySpecific(context, mime, true, resources);
    }
}

class HWCodecFactory extends CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, int ...resources) {
        return new CodecFamilySpecific(context, mime, false, resources);
    }
}

/* ====================================================================== */
/*                  Test Steps, Test (Case)s, and Test List               */
/* ====================================================================== */
class StepRunner implements Runnable {
    public StepRunner(Step s) {
        mStep = s;
        mThrowed = null;
    }
    public void run() {
        try {
            mStep.run();
        } catch (Throwable e) {
            mThrowed = e;
        }
    }
    public void throwThrowed() throws Throwable {
        if (mThrowed != null) {
            throw mThrowed;
        }
    }
    private Throwable mThrowed;
    private Step mStep;
}

class TestList extends ArrayList<Step> {
    private final static String TAG = "AdaptiveTestList";
    public void run() throws Throwable {
        Throwable res = null;
        for (Step step: this) {
            try {
                Log.i(TAG, step.getDescription());
                if (step.stepSurface().needsToRunInSeparateThread()) {
                    StepRunner runner = new StepRunner(step);
                    Thread th = new Thread(runner, "stepWrapper");
                    th.start();
                    th.join();
                    runner.throwThrowed();
                } else {
                    step.run();
                }
            } catch (Throwable e) {
                Log.e(TAG, "while " + step.getDescription(), e);
                res = e;
                mFailedSteps++;
            } finally {
                mWarnings += step.getWarnings();
            }
        }
        if (res != null) {
            throw new RuntimeException(
                    mFailedSteps + " failed steps, " + mWarnings + " warnings",
                    res);
        }
    }
    public int getWarnings() {
        return mWarnings;
    }
    public int getFailures() {
        return mFailedSteps;
    }
    private int mFailedSteps;
    private int mWarnings;
}

abstract class Test {
    public static final int FORMAT_ADAPTIVE_LARGEST = 1;
    public static final int FORMAT_ADAPTIVE_FIRST = 2;
    public static final int FORMAT_REGULAR = 3;

    protected int mFormatType;
    protected boolean mUseSurface;
    protected boolean mUseSurfaceTexture;

    public Test() {
        mFormatType = FORMAT_REGULAR;
        mUseSurface = true;
        mUseSurfaceTexture = false;
    }

    public Test adaptive() {
        mFormatType = FORMAT_ADAPTIVE_LARGEST;
        return this;
    }

    public Test adaptiveSmall() {
        mFormatType = FORMAT_ADAPTIVE_FIRST;
        return this;
    }

    public Test byteBuffer() {
        mUseSurface = false;
        mUseSurfaceTexture = false;
        return this;
    }

    public Test texture() {
        mUseSurface = false;
        mUseSurfaceTexture = true;
        return this;
    }

    public void checkAdaptiveFormat() {
        assert mFormatType != FORMAT_REGULAR:
                "must be used with adaptive format";
    }

    abstract protected TestSurface getSurface();

    /* TRICKY: format is updated in each test run as we are actually reusing the
       same 2 MediaFormat objects returned from MediaExtractor. Therefore,
       format must be explicitly obtained in each test step.

       returns null if codec does not support the format.
     */
    protected MediaFormat getFormat(Codec c) {
        return getFormat(c, 0);
    }

    protected MediaFormat getFormat(Codec c, int i) {
        MediaFormat format = null;
        if (mFormatType == FORMAT_REGULAR) {
            format = c.mediaList[i].getFormat();
        } else if (mFormatType == FORMAT_ADAPTIVE_FIRST && c.adaptive) {
            format = c.mediaList[i].getAdaptiveFormat(
                    c.mediaList[i].getWidth(), c.mediaList[i].getHeight());
        } else if (mFormatType == FORMAT_ADAPTIVE_LARGEST && c.adaptive) {
            /* update adaptive format to max size used */
            format = c.mediaList[i].getAdaptiveFormat(0, 0);
            for (Media media : c.mediaList) {
                /* get the largest width, and the largest height independently */
                if (media.getWidth() > format.getInteger(MediaFormat.KEY_MAX_WIDTH)) {
                    format.setInteger(MediaFormat.KEY_MAX_WIDTH, media.getWidth());
                }
                if (media.getHeight() > format.getInteger(MediaFormat.KEY_MAX_HEIGHT)) {
                    format.setInteger(MediaFormat.KEY_MAX_HEIGHT, media.getHeight());
                }
            }
        }
        return format;
    }

    public boolean isValid(Codec c) { return true; }
    public abstract void addTests(TestList tests, Codec c);
}

abstract class Step {
    private static final String TAG = "AdaptiveStep";

    public Step(String title, Test instance, Codec codec, Media media) {
        mTest = instance;
        mCodec = codec;
        mMedia = media;
        mDescription = title + " on " + stepSurface().getSurface() + " using " +
                mCodec.name + " and " + stepFormat();
    }
    public Step(String title, Test instance, Codec codec, int mediaIx) {
        this(title, instance, codec, codec.mediaList[mediaIx]);
    }
    public Step(String title, Test instance, Codec codec) {
        this(title, instance, codec, 0);
    }
    public Step(String description) {
        mDescription = description;
    }
    public Step() { }

    public abstract void run() throws Throwable;

    private String mDescription;
    private Test mTest;
    private Codec mCodec;
    private Media mMedia;
    private int mWarnings;

    /* TRICKY: use non-standard getter names so that we don't conflict with the getters
       in the Test classes, as most test Steps are defined as anonymous classes inside
       the test classes.
     */
    public MediaFormat stepFormat() {
        int ix = Arrays.asList(mCodec.mediaList).indexOf(mMedia);
        return mTest.getFormat(mCodec, ix);
    }

    public TestSurface stepSurface() {
        return mTest.getSurface();
    }

    public Media stepMedia() { return mMedia; }

    public String getDescription() { return mDescription; }
    public int getWarnings() { return mWarnings; }

    public void warn(String message) {
        Log.e(TAG, "WARNING: " + message + " in " + getDescription());
        mWarnings++;
    }
    public void warn(String message, Throwable t) {
        Log.e(TAG, "WARNING: " + message + " in " + getDescription(), t);
        mWarnings++;
    }
    public void warn(Iterable<String> warnings) {
        for (String warning: warnings) {
            warn(warning);
        }
    }
}

interface TestSurface {
    public Surface getSurface();
    public long checksum();
    public void release();
    public void prepare();         // prepare surface prior to render
    public void waitForDraw();     // wait for rendering to take place
    public boolean needsToRunInSeparateThread();
}

class DecoderSurface extends OutputSurface implements TestSurface {
    private ByteBuffer mBuf;
    int mWidth;
    int mHeight;
    CRC32 mCRC;

    public DecoderSurface(int width, int height, CRC32 crc) {
        super(width, height);
        mWidth = width;
        mHeight = height;
        mCRC = crc;
        mBuf = ByteBuffer.allocateDirect(4 * width * height);
    }

    public void prepare() {
        makeCurrent();
    }

    public void waitForDraw() {
        awaitNewImage();
        drawImage();
    }

    public long checksum() {
        mBuf.position(0);
        GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, mBuf);
        mBuf.position(0);
        return AdaptivePlaybackTest.checksum(mBuf, mBuf.capacity(), mCRC);
    }

    public void release() {
        super.release();
        mBuf = null;
    }

    public boolean needsToRunInSeparateThread() {
        return true;
    }
}

class ActivitySurface implements TestSurface {
    private Surface mSurface;
    public ActivitySurface(Surface s) {
        mSurface = s;
    }
    public Surface getSurface() {
        return mSurface;
    }
    public void prepare() { }
    public void waitForDraw() { }
    public long checksum() {
        return 0;
    }
    public void release() {
        // don't release activity surface, as it is reusable
    }
    public boolean needsToRunInSeparateThread() {
        return false;
    }
}
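
/*
 * For reference (a sketch, not part of the test flow above): adaptive playback is requested
 * solely through the MediaFormat handed to MediaCodec.configure(). Assuming a decoder that
 * reports CodecCapabilities.FEATURE_AdaptivePlayback and an expected resolution ceiling of
 * 1280x720 (both assumptions for the example), the setup mirrors what Media.getAdaptiveFormat()
 * and Test.getFormat() construct:
 *
 *   MediaFormat fmt = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 480, 360);
 *   fmt.setInteger(MediaFormat.KEY_MAX_WIDTH, 1280);  // largest width that will be fed in
 *   fmt.setInteger(MediaFormat.KEY_MAX_HEIGHT, 720);  // largest height that will be fed in
 *   codec.configure(fmt, surface, null, 0);
 */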