Home | History | Annotate | Download | only in cts
      1 /*
      2  * Copyright (C) 2016 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.media.cts;
     18 
import android.annotation.RawRes;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.util.Log;

import com.android.compatibility.common.util.CtsAndroidTestCase;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Random;
     39 
     40 // Test the Java AudioTrack surround sound and HDMI passthrough.
     41 // Most tests involve creating a track with a given format and then playing
     42 // a few seconds of audio. The playback is verified by measuring the output
     43 // sample rate based on the AudioTimestamps.
     44 
     45 public class AudioTrackSurroundTest extends CtsAndroidTestCase {
     46     private static final String TAG = "AudioTrackSurroundTest";
     47 
    // Allow 1% error between the nominal sample rate and the rate measured
    // from AudioTimestamps in playAndMeasureRate().
    private static final double MAX_RATE_TOLERANCE_FRACTION = 0.01;
    private static final boolean LOG_TIMESTAMPS = false; // set true for debugging

    // Set this true to prefer the device that supports the particular encoding.
    // But note that as of 3/25/2016, a bug causes Direct tracks to fail.
    // So only set true when debugging that problem.
    private static final boolean USE_PREFERRED_DEVICE = false;

    // Should we fail if there is no PCM16 device reported by device enumeration?
    // This can happen if, for example, an ATV set top box does not have its HDMI cable plugged in.
    private static final boolean REQUIRE_PCM_DEVICE = false;

    private final static long NANOS_PER_MILLISECOND = 1000000L;
    private final static int MILLIS_PER_SECOND = 1000;
    private final static long NANOS_PER_SECOND = NANOS_PER_MILLISECOND * MILLIS_PER_SECOND;

    // AC3 test clip used by the AC3 playback tests.
    private final static int RES_AC3_VOICE_48000 = R.raw.voice12_48k_128kbps_15s_ac3;

    // Encoding of the most recently played track; playAndMeasureRate() sleeps
    // when switching to/from compressed formats to let the HAL settle.
    private static int mLastPlayedEncoding = AudioFormat.ENCODING_INVALID;

    // Devices that support various encodings.
    // Populated by scanDevicesForEncodings() before every test; a null field
    // means no sink advertising that encoding was found.
    // NOTE(review): mDeviceScanComplete is never written in this file - looks
    // unused; confirm before removing.
    private static boolean mDeviceScanComplete = false;
    private static AudioDeviceInfo mInfoPCM16 = null;
    private static AudioDeviceInfo mInfoAC3 = null;
    private static AudioDeviceInfo mInfoE_AC3 = null;
    private static AudioDeviceInfo mInfoDTS = null;
    private static AudioDeviceInfo mInfoDTS_HD = null;
    private static AudioDeviceInfo mInfoIEC61937 = null;
     76 
     77     private static void log(String testName, String message) {
     78         Log.i(TAG, "[" + testName + "] " + message);
     79     }
     80 
     81     private static void logw(String testName, String message) {
     82         Log.w(TAG, "[" + testName + "] " + message);
     83     }
     84 
     85     private static void loge(String testName, String message) {
     86         Log.e(TAG, "[" + testName + "] " + message);
     87     }
     88 
    // This is a special method that is called automatically before each test.
    // It refreshes the static mInfo* device fields so each test sees the
    // currently attached sinks and their supported encodings.
    @Override
    protected void setUp() throws Exception {
        // Note that I tried to only scan for encodings once but the static
        // data did not persist properly. That may be a bug.
        // For now, just scan before every test.
        scanDevicesForEncodings();
    }
     97 
     98     private void scanDevicesForEncodings() throws Exception {
     99         final String MTAG = "scanDevicesForEncodings";
    100         // Scan devices to see which encodings are supported.
    101         AudioManager audioManager = (AudioManager) getContext()
    102                 .getSystemService(Context.AUDIO_SERVICE);
    103         AudioDeviceInfo[] infos = audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
    104         for (AudioDeviceInfo info : infos) {
    105             log(MTAG, "scanning devices, name = " + info.getProductName()
    106                     + ", id = " + info.getId()
    107                     + ", " + (info.isSink() ? "sink" : "source")
    108                     + ", type = " + info.getType()
    109                     + " ------");
    110             String text = "{";
    111             for (int encoding : info.getEncodings()) {
    112                 text += String.format("0x%08X, ", encoding);
    113             }
    114             text += "}";
    115             log(MTAG, "  encodings = " + text);
    116             text = "{";
    117             for (int rate : info.getSampleRates()) {
    118                 text += rate + ", ";
    119             }
    120             text += "}";
    121             log(MTAG, "  sample rates = " + text);
    122             if (info.isSink()) {
    123                 for (int encoding : info.getEncodings()) {
    124                     switch (encoding) {
    125                         case AudioFormat.ENCODING_PCM_16BIT:
    126                             mInfoPCM16 = info;
    127                             log(MTAG, "mInfoPCM16 set to " + info);
    128                             break;
    129                         case AudioFormat.ENCODING_AC3:
    130                             mInfoAC3 = info;
    131                             log(MTAG, "mInfoAC3 set to " + info);
    132                             break;
    133                         case AudioFormat.ENCODING_E_AC3:
    134                             mInfoE_AC3 = info;
    135                             log(MTAG, "mInfoE_AC3 set to " + info);
    136                             break;
    137                         case AudioFormat.ENCODING_DTS:
    138                             mInfoDTS = info;
    139                             log(MTAG, "mInfoDTS set to " + info);
    140                             break;
    141                         case AudioFormat.ENCODING_DTS_HD:
    142                             mInfoDTS_HD = info;
    143                             log(MTAG, "mInfoDTS_HD set to " + info);
    144                             break;
    145                         case AudioFormat.ENCODING_IEC61937:
    146                             mInfoIEC61937 = info;
    147                             log(MTAG, "mInfoIEC61937 set to " + info);
    148                             break;
    149                         default:
    150                             // This is OK. It is just an encoding that we don't care about.
    151                             break;
    152                     }
    153                 }
    154             }
    155         }
    156     }
    157 
    158     // Load a resource into a byte[]
    159     private byte[] loadRawResourceBytes(@RawRes int id) throws Exception {
    160         AssetFileDescriptor masterFd = getContext().getResources().openRawResourceFd(id);
    161         long masterLength = masterFd.getLength();
    162         byte[] masterBuffer = new byte[(int) masterLength];
    163         InputStream is = masterFd.createInputStream();
    164         BufferedInputStream bis = new BufferedInputStream(is);
    165         int result = bis.read(masterBuffer);
    166         bis.close();
    167         masterFd.close();
    168         return masterBuffer;
    169     }
    170 
    171     // Load a resource into a short[]
    172     private short[] loadRawResourceShorts(@RawRes int id) throws Exception {
    173         AssetFileDescriptor masterFd = getContext().getResources().openRawResourceFd(id);
    174         long masterLength = masterFd.getLength();
    175         short[] masterBuffer = new short[(int) (masterLength / 2)];
    176         InputStream is = masterFd.createInputStream();
    177         BufferedInputStream bis = new BufferedInputStream(is);
    178         for (int i = 0; i < masterBuffer.length; i++) {
    179             int lo = bis.read(); // assume Little Endian
    180             int hi = bis.read();
    181             masterBuffer[i] = (short) (hi * 256 + lo);
    182         }
    183         bis.close();
    184         masterFd.close();
    185         return masterBuffer;
    186     }
    187 
    188     public void testLoadSineSweep() throws Exception {
    189         final String TEST_NAME = "testLoadSineSweep";
    190         short[] shortData = loadRawResourceShorts(R.raw.sinesweepraw);
    191         assertTrue(TEST_NAME + ": load sinesweepraw as shorts", shortData.length > 100);
    192         byte[] byteData = loadRawResourceBytes(R.raw.sinesweepraw);
    193         assertTrue(TEST_NAME + ": load sinesweepraw as bytes", byteData.length > shortData.length);
    194     }
    195 
    196     private static AudioTrack createAudioTrack(int sampleRate, int encoding, int channelConfig) {
    197         final String TEST_NAME = "createAudioTrack";
    198         int minBufferSize = AudioTrack.getMinBufferSize(
    199                 sampleRate, channelConfig,
    200                 encoding);
    201         assertTrue(TEST_NAME + ": getMinBufferSize", minBufferSize > 0);
    202         int bufferSize = minBufferSize * 3; // plenty big
    203         AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC,
    204                 sampleRate, channelConfig,
    205                 encoding, bufferSize,
    206                 AudioTrack.MODE_STREAM);
    207         return track;
    208     }
    209 
    210     static class TimestampAnalyzer {
    211         ArrayList<AudioTimestamp> mTimestamps = new ArrayList<AudioTimestamp>();
    212         AudioTimestamp mPreviousTimestamp = null;
    213 
    214         static String timestampToString(AudioTimestamp timestamp) {
    215             if (timestamp == null)
    216                 return "null";
    217             return "(pos = " + timestamp.framePosition + ", nanos = " + timestamp.nanoTime + ")";
    218         }
    219 
    220         // Add timestamp if unique and valid.
    221         void addTimestamp(AudioTrack track) {
    222             AudioTimestamp timestamp = new AudioTimestamp();
    223             boolean gotTimestamp = track.getTimestamp(timestamp);
    224             if (gotTimestamp) {
    225                 // Only save timestamps after the data is flowing.
    226                 if (mPreviousTimestamp != null
    227                     && timestamp.framePosition > 0
    228                     && (timestamp.nanoTime != mPreviousTimestamp.nanoTime
    229                             || timestamp.framePosition != mPreviousTimestamp.framePosition)) {
    230                     mTimestamps.add(timestamp);
    231                 }
    232                 mPreviousTimestamp = timestamp;
    233             }
    234         }
    235 
    236         void checkIndividualTimestamps(int sampleRate) {
    237             AudioTimestamp previous = null;
    238             double sumDeltaSquared = 0.0;
    239             int populationSize = 0;
    240             double maxDeltaMillis = 0.0;
    241             // Make sure the timestamps are smooth and don't go retrograde.
    242             for (AudioTimestamp timestamp : mTimestamps) {
    243                 if (previous != null) {
    244 
    245                     assertTrue("framePosition must be monotonic",
    246                             timestamp.framePosition >= previous.framePosition);
    247                     assertTrue("nanoTime must be monotonic",
    248                             timestamp.nanoTime >= previous.nanoTime);
    249 
    250                     if (timestamp.framePosition > previous.framePosition) {
    251                         // Measure timing jitter.
    252                         // Calculate predicted duration based on measured rate and compare
    253                         // it with actual duration.
    254                         final double TOLERANCE_MILLIS = 2.0;
    255                         long elapsedFrames = timestamp.framePosition - previous.framePosition;
    256                         long elapsedNanos = timestamp.nanoTime - previous.nanoTime;
    257                         double measuredMillis = elapsedNanos / (double) NANOS_PER_MILLISECOND;
    258                         double expectedMillis = elapsedFrames * (double) MILLIS_PER_SECOND
    259                             / sampleRate;
    260                         double deltaMillis = measuredMillis - expectedMillis;
    261                         sumDeltaSquared += deltaMillis * deltaMillis;
    262                         populationSize++;
    263                         // We only issue a warning here because the CDD does not mandate a
    264                         // specific tolerance.
    265                         double absDeltaMillis = Math.abs(deltaMillis);
    266                         if (absDeltaMillis > TOLERANCE_MILLIS) {
    267                             Log.w(TAG, "measured time exceeds expected"
    268                                 + ", srate = " + sampleRate
    269                                 + ", frame = " + timestamp.framePosition
    270                                 + ", expected = " + expectedMillis
    271                                 + ", measured = " + measuredMillis + " (msec)"
    272                                 );
    273                         }
    274                         if (absDeltaMillis > maxDeltaMillis) {
    275                             maxDeltaMillis = absDeltaMillis;
    276                         }
    277                     }
    278                 }
    279                 previous = timestamp;
    280             }
    281             Log.d(TAG, "max abs(delta) from expected duration = " + maxDeltaMillis + " msec");
    282             if (populationSize > 0) {
    283                 double deviation = Math.sqrt(sumDeltaSquared / populationSize);
    284                 Log.d(TAG, "standard deviation from expected duration = " + deviation + " msec");
    285             }
    286         }
    287 
    288         // Use collected timestamps to estimate a sample rate.
    289         double estimateSampleRate() {
    290             assertTrue("expect many timestamps, got " + mTimestamps.size(),
    291                     mTimestamps.size() > 10);
    292             // Use first and last timestamp to get the most accurate rate.
    293             AudioTimestamp first = mTimestamps.get(0);
    294             AudioTimestamp last = mTimestamps.get(mTimestamps.size() - 1);
    295             return calculateSampleRate(first, last);
    296         }
    297 
    298         /**
    299          * @param timestamp1
    300          * @param timestamp2
    301          */
    302         private double calculateSampleRate(AudioTimestamp timestamp1, AudioTimestamp timestamp2) {
    303             long elapsedFrames = timestamp2.framePosition - timestamp1.framePosition;
    304             long elapsedNanos = timestamp2.nanoTime - timestamp1.nanoTime;
    305             double measuredRate = elapsedFrames * (double) NANOS_PER_SECOND / elapsedNanos;
    306             if (LOG_TIMESTAMPS) {
    307                 Log.i(TAG, "calculateSampleRate(), elapsedFrames =, " + elapsedFrames
    308                         + ", measuredRate =, "
    309                         + (int) measuredRate);
    310             }
    311             return measuredRate;
    312         }
    313     }
    314 
    315     // Class for looping a recording for several seconds and measuring the sample rate.
    316     // This is not static because it needs to call getContext().
    317     abstract class SamplePlayerBase {
    318         private final int mSampleRate;
    319         private final int mEncoding;
    320         private final int mChannelConfig;
    321         private int mBlockSize = 512;
    322         protected int mOffset = 0;
    323         protected AudioTrack mTrack;
    324         private final TimestampAnalyzer mTimestampAnalyzer = new TimestampAnalyzer();
    325 
    326         SamplePlayerBase(int sampleRate, int encoding, int channelConfig) {
    327             mSampleRate = sampleRate;
    328             mEncoding = encoding;
    329             mChannelConfig = channelConfig;
    330         }
    331 
    332         // Use abstract write to handle byte[] or short[] data.
    333         protected abstract int writeBlock(int numSamples);
    334 
    335         private int primeBuffer() {
    336             // Will not block when track is stopped.
    337             return writeBlock(Integer.MAX_VALUE);
    338         }
    339 
    340         // Add a warning to the assert message that might help folks figure out why their
    341         // PCM test is failing.
    342         private String getPcmWarning() {
    343             return (mInfoPCM16 == null && AudioFormat.isEncodingLinearPcm(mEncoding))
    344                 ? " (No PCM device!)" : "";
    345         }
    346 
    347         /**
    348          * Use a device that we know supports the current encoding.
    349          */
    350         private void usePreferredDevice() {
    351             AudioDeviceInfo info = null;
    352             switch (mEncoding) {
    353                 case AudioFormat.ENCODING_PCM_16BIT:
    354                     info = mInfoPCM16;
    355                     break;
    356                 case AudioFormat.ENCODING_AC3:
    357                     info = mInfoAC3;
    358                     break;
    359                 case AudioFormat.ENCODING_E_AC3:
    360                     info = mInfoE_AC3;
    361                     break;
    362                 case AudioFormat.ENCODING_DTS:
    363                     info = mInfoDTS;
    364                     break;
    365                 case AudioFormat.ENCODING_DTS_HD:
    366                     info = mInfoDTS_HD;
    367                     break;
    368                 case AudioFormat.ENCODING_IEC61937:
    369                     info = mInfoIEC61937;
    370                     break;
    371                 default:
    372                     break;
    373             }
    374 
    375             if (info != null) {
    376                 log(TAG, "track.setPreferredDevice(" + info + ")");
    377                 mTrack.setPreferredDevice(info);
    378             }
    379         }
    380 
    381         public void playAndMeasureRate() throws Exception {
    382             final String TEST_NAME = "playAndMeasureRate";
    383             final long TEST_DURATION_MILLIS = 5000; // just long enough to measure the rate
    384 
    385             if (mLastPlayedEncoding == AudioFormat.ENCODING_INVALID ||
    386                     !AudioFormat.isEncodingLinearPcm(mEncoding) ||
    387                     !AudioFormat.isEncodingLinearPcm(mLastPlayedEncoding)) {
    388                 Log.d(TAG, "switching from format: " + mLastPlayedEncoding
    389                         + " to: " + mEncoding
    390                         + " requires sleep");
    391                 // Switching between compressed formats may require
    392                 // some time for the HAL to adjust and give proper timing.
    393                 // One second should be ok, but we use 2 just in case.
    394                 Thread.sleep(2000 /* millis */);
    395             }
    396             mLastPlayedEncoding = mEncoding;
    397 
    398             log(TEST_NAME, String.format("test using rate = %d, encoding = 0x%08x",
    399                     mSampleRate, mEncoding));
    400             // Create a track and prime it.
    401             mTrack = createAudioTrack(mSampleRate, mEncoding, mChannelConfig);
    402             try {
    403                 assertEquals(TEST_NAME + ": track created" + getPcmWarning(),
    404                         AudioTrack.STATE_INITIALIZED,
    405                         mTrack.getState());
    406 
    407                 if (USE_PREFERRED_DEVICE) {
    408                     usePreferredDevice();
    409                 }
    410 
    411                 int bytesWritten = 0;
    412                 mOffset = primeBuffer(); // prime the buffer
    413                 assertTrue(TEST_NAME + ": priming offset = " + mOffset + getPcmWarning(),
    414                     mOffset > 0);
    415                 bytesWritten += mOffset;
    416 
    417                 // Play for a while.
    418                 mTrack.play();
    419 
    420                 log(TEST_NAME, "native rate = "
    421                         + mTrack.getNativeOutputSampleRate(mTrack.getStreamType()));
    422                 long elapsedMillis = 0;
    423                 long startTime = System.currentTimeMillis();
    424                 while (elapsedMillis < TEST_DURATION_MILLIS) {
    425                     writeBlock(mBlockSize);
    426                     elapsedMillis = System.currentTimeMillis() - startTime;
    427                     mTimestampAnalyzer.addTimestamp(mTrack);
    428                 }
    429 
    430                 // Did we underrun? Allow 0 or 1 because there is sometimes
    431                 // an underrun on startup.
    432                 int underrunCount1 = mTrack.getUnderrunCount();
    433                 assertTrue(TEST_NAME + ": too many underruns, got underrunCount1" + getPcmWarning(),
    434                         underrunCount1 < 2);
    435 
    436                 // Estimate the sample rate and compare it with expected.
    437                 double estimatedRate = mTimestampAnalyzer.estimateSampleRate();
    438                 Log.d(TAG, "measured sample rate = " + estimatedRate);
    439                 assertEquals(TEST_NAME + ": measured sample rate" + getPcmWarning(),
    440                         mSampleRate, estimatedRate, mSampleRate * MAX_RATE_TOLERANCE_FRACTION);
    441 
    442                 // Check for jitter or retrograde motion in each timestamp.
    443                 mTimestampAnalyzer.checkIndividualTimestamps(mSampleRate);
    444 
    445             } finally {
    446                 mTrack.release();
    447             }
    448         }
    449     }
    450 
    451     // Create player for short[]
    452     class SamplePlayerShorts extends SamplePlayerBase {
    453         private final short[] mData;
    454 
    455         SamplePlayerShorts(int sampleRate, int encoding, int channelConfig) {
    456             super(sampleRate, encoding, channelConfig);
    457             mData = new short[64 * 1024];
    458             // Fill with noise. We should not hear the noise for IEC61937.
    459             int amplitude = 8000;
    460             Random random = new Random();
    461             for (int i = 0; i < mData.length; i++) {
    462                 mData[i] = (short)(random.nextInt(amplitude) - (amplitude / 2));
    463             }
    464         }
    465 
    466         SamplePlayerShorts(int sampleRate, int encoding, int channelConfig, @RawRes int resourceId)
    467                 throws Exception {
    468             super(sampleRate, encoding, channelConfig);
    469             mData = loadRawResourceShorts(resourceId);
    470             assertTrue("SamplePlayerShorts: load resource file as shorts", mData.length > 0);
    471         }
    472 
    473         @Override
    474         protected int writeBlock(int numShorts) {
    475             int result = 0;
    476             int shortsToWrite = numShorts;
    477             int shortsLeft = mData.length - mOffset;
    478             if (shortsToWrite > shortsLeft) {
    479                 shortsToWrite = shortsLeft;
    480             }
    481             if (shortsToWrite > 0) {
    482                 result = mTrack.write(mData, mOffset, shortsToWrite);
    483                 mOffset += result;
    484             } else {
    485                 mOffset = 0; // rewind
    486             }
    487             return result;
    488         }
    489     }
    490 
    491     // Create player for byte[]
    492     class SamplePlayerBytes extends SamplePlayerBase {
    493         private final byte[] mData;
    494 
    495         SamplePlayerBytes(int sampleRate, int encoding, int channelConfig) {
    496             super(sampleRate, encoding, channelConfig);
    497             mData = new byte[128 * 1024];
    498         }
    499 
    500         SamplePlayerBytes(int sampleRate, int encoding, int channelConfig, @RawRes int resourceId)
    501                 throws Exception {
    502             super(sampleRate, encoding, channelConfig);
    503             mData = loadRawResourceBytes(resourceId);
    504             assertTrue("SamplePlayerBytes: load resource file as bytes", mData.length > 0);
    505         }
    506 
    507         @Override
    508         protected int writeBlock(int numBytes) {
    509             int result = 0;
    510             int bytesToWrite = numBytes;
    511             int bytesLeft = mData.length - mOffset;
    512             if (bytesToWrite > bytesLeft) {
    513                 bytesToWrite = bytesLeft;
    514             }
    515             if (bytesToWrite > 0) {
    516                 result = mTrack.write(mData, mOffset, bytesToWrite);
    517                 mOffset += result;
    518             } else {
    519                 mOffset = 0; // rewind
    520             }
    521             return result;
    522         }
    523     }
    524 
    525     public void testPlayAC3Bytes() throws Exception {
    526         if (mInfoAC3 != null) {
    527             SamplePlayerBytes player = new SamplePlayerBytes(
    528                     48000, AudioFormat.ENCODING_AC3, AudioFormat.CHANNEL_OUT_STEREO,
    529                     RES_AC3_VOICE_48000);
    530             player.playAndMeasureRate();
    531         }
    532     }
    533 
    534     public void testPlayAC3Shorts() throws Exception {
    535         if (mInfoAC3 != null) {
    536             SamplePlayerShorts player = new SamplePlayerShorts(
    537                     48000, AudioFormat.ENCODING_AC3, AudioFormat.CHANNEL_OUT_STEREO,
    538                     RES_AC3_VOICE_48000);
    539             player.playAndMeasureRate();
    540         }
    541     }
    542 
    // Note that for testing IEC61937, the Audio framework does not look at the
    // wrapped data. It just passes it through over HDMI. So we can just use
    // zeros instead of real data.
    546     public void testPlayIEC61937_32000() throws Exception {
    547         if (mInfoIEC61937 != null) {
    548             SamplePlayerShorts player = new SamplePlayerShorts(
    549                     32000, AudioFormat.ENCODING_IEC61937, AudioFormat.CHANNEL_OUT_STEREO);
    550             player.playAndMeasureRate();
    551         }
    552     }
    553 
    554     public void testPlayIEC61937_44100() throws Exception {
    555         if (mInfoIEC61937 != null) {
    556             SamplePlayerShorts player = new SamplePlayerShorts(
    557                     44100, AudioFormat.ENCODING_IEC61937, AudioFormat.CHANNEL_OUT_STEREO);
    558             player.playAndMeasureRate();
    559         }
    560     }
    561 
    562     public void testPlayIEC61937_48000() throws Exception {
    563         if (mInfoIEC61937 != null) {
    564             SamplePlayerShorts player = new SamplePlayerShorts(
    565                     48000, AudioFormat.ENCODING_IEC61937, AudioFormat.CHANNEL_OUT_STEREO);
    566             player.playAndMeasureRate();
    567         }
    568     }
    569 
    570     public void testIEC61937_Errors() throws Exception {
    571         if (mInfoIEC61937 != null) {
    572             final String TEST_NAME = "testIEC61937_Errors";
    573             try {
    574                 AudioTrack track = createAudioTrack(48000, AudioFormat.ENCODING_IEC61937,
    575                         AudioFormat.CHANNEL_OUT_MONO);
    576                 assertTrue(TEST_NAME + ": IEC61937 track creation should fail for mono", false);
    577             } catch (IllegalArgumentException e) {
    578                 // This is expected behavior.
    579             }
    580 
    581             try {
    582                 AudioTrack track = createAudioTrack(48000, AudioFormat.ENCODING_IEC61937,
    583                         AudioFormat.CHANNEL_OUT_5POINT1);
    584                 assertTrue(TEST_NAME + ": IEC61937 track creation should fail for 5.1", false);
    585             } catch (IllegalArgumentException e) {
    586                 // This is expected behavior.
    587             }
    588         }
    589     }
    590 
    591     public void testPcmSupport() throws Exception {
    592         if (REQUIRE_PCM_DEVICE) {
    593             // There should always be a dummy PCM device available.
    594             assertTrue("testPcmSupport: PCM should be supported."
    595                     + " On ATV device please check HDMI connection.",
    596                     mInfoPCM16 != null);
    597         }
    598     }
    599 
    600     private boolean isPcmTestingEnabled() {
    601         return (mInfoPCM16 != null || !REQUIRE_PCM_DEVICE);
    602     }
    603 
    604     public void testPlaySineSweepShorts() throws Exception {
    605         if (isPcmTestingEnabled()) {
    606             SamplePlayerShorts player = new SamplePlayerShorts(
    607                     44100, AudioFormat.ENCODING_PCM_16BIT, AudioFormat.CHANNEL_OUT_STEREO,
    608                     R.raw.sinesweepraw);
    609             player.playAndMeasureRate();
    610         }
    611     }
    612 
    613     public void testPlaySineSweepBytes() throws Exception {
    614         if (isPcmTestingEnabled()) {
    615             SamplePlayerBytes player = new SamplePlayerBytes(
    616                     44100, AudioFormat.ENCODING_PCM_16BIT, AudioFormat.CHANNEL_OUT_STEREO,
    617                     R.raw.sinesweepraw);
    618             player.playAndMeasureRate();
    619         }
    620     }
    621 
    622     public void testPlaySineSweepBytes48000() throws Exception {
    623         if (isPcmTestingEnabled()) {
    624             SamplePlayerBytes player = new SamplePlayerBytes(
    625                     48000, AudioFormat.ENCODING_PCM_16BIT, AudioFormat.CHANNEL_OUT_STEREO,
    626                     R.raw.sinesweepraw);
    627             player.playAndMeasureRate();
    628         }
    629     }
    630 
    631     public void testPlaySineSweepShortsMono() throws Exception {
    632         if (isPcmTestingEnabled()) {
    633             SamplePlayerShorts player = new SamplePlayerShorts(44100, AudioFormat.ENCODING_PCM_16BIT,
    634                     AudioFormat.CHANNEL_OUT_MONO,
    635                     R.raw.sinesweepraw);
    636             player.playAndMeasureRate();
    637         }
    638     }
    639 
    640     public void testPlaySineSweepBytesMono()
    641             throws Exception {
    642         if (isPcmTestingEnabled()) {
    643             SamplePlayerBytes player = new SamplePlayerBytes(44100,
    644                     AudioFormat.ENCODING_PCM_16BIT, AudioFormat.CHANNEL_OUT_MONO, R.raw.sinesweepraw);
    645             player.playAndMeasureRate();
    646         }
    647     }
    648 
    649 }
    650