      1 /*
      2  * Copyright (C) 2008 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.media;
     18 
     19 import java.io.IOException;
     20 import java.lang.annotation.Retention;
     21 import java.lang.annotation.RetentionPolicy;
     22 import java.lang.ref.WeakReference;
     23 import java.nio.ByteBuffer;
     24 import java.util.Collection;
     25 import java.util.Iterator;
     26 import java.util.ArrayList;
     27 import java.util.List;
     28 
     29 import android.annotation.IntDef;
     30 import android.annotation.NonNull;
     31 import android.annotation.SystemApi;
     32 import android.app.ActivityThread;
     33 import android.os.Binder;
     34 import android.os.Handler;
     35 import android.os.IBinder;
     36 import android.os.Looper;
     37 import android.os.Message;
     38 import android.os.PersistableBundle;
     39 import android.os.RemoteException;
     40 import android.os.ServiceManager;
     41 import android.text.TextUtils;
     42 import android.util.ArrayMap;
     43 import android.util.Log;
     44 import android.util.Pair;
     45 
     46 import com.android.internal.annotations.GuardedBy;
     47 
     48 /**
     49  * The AudioRecord class manages the audio resources for Java applications
     50  * to record audio from the audio input hardware of the platform. This is
     51  * achieved by "pulling" (reading) the data from the AudioRecord object. The
      52  * application is responsible for polling the AudioRecord object in a timely manner using one of
      53  * the following three methods: {@link #read(byte[], int, int)}, {@link #read(short[], int, int)}
     54  * or {@link #read(ByteBuffer, int)}. The choice of which method to use will be based
     55  * on the audio data storage format that is the most convenient for the user of AudioRecord.
     56  * <p>Upon creation, an AudioRecord object initializes its associated audio buffer that it will
     57  * fill with the new audio data. The size of this buffer, specified during the construction,
     58  * determines how long an AudioRecord can record before "over-running" data that has not
      59  * been read yet. Data should be read from the audio hardware in chunks of sizes smaller than
      60  * the total recording buffer size.
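 * <p>A minimal capture sketch follows; it is illustrative only (the source, sample rate,
 * buffer sizing and the <code>keepRecording</code> flag are assumptions, not requirements):
 * <pre class="prettyprint">
 * final int sampleRate = 44100;
 * final int minBufSize = AudioRecord.getMinBufferSize(sampleRate,
 *         AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
 * AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
 *         AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, 2 * minBufSize);
 * if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
 *     short[] chunk = new short[minBufSize / 2]; // read in chunks smaller than the buffer
 *     recorder.startRecording();
 *     while (keepRecording) { // hypothetical loop condition maintained by the application
 *         int shortsRead = recorder.read(chunk, 0, chunk.length);
 *         // process shortsRead samples from chunk here
 *     }
 *     recorder.stop();
 * }
 * recorder.release();
 * </pre>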
     61  */
     62 public class AudioRecord implements AudioRouting
     63 {
     64     //---------------------------------------------------------
     65     // Constants
     66     //--------------------
     67 
     68 
     69     /**
      70      *  Indicates the AudioRecord state is not successfully initialized.
     71      */
     72     public static final int STATE_UNINITIALIZED = 0;
     73     /**
      74      *  Indicates the AudioRecord state is ready to be used.
     75      */
     76     public static final int STATE_INITIALIZED   = 1;
     77 
     78     /**
      79      * Indicates the AudioRecord recording state is not recording.
     80      */
     81     public static final int RECORDSTATE_STOPPED = 1;  // matches SL_RECORDSTATE_STOPPED
     82     /**
      83      * Indicates the AudioRecord recording state is recording.
     84      */
     85     public static final int RECORDSTATE_RECORDING = 3;// matches SL_RECORDSTATE_RECORDING
     86 
     87     /**
     88      * Denotes a successful operation.
     89      */
     90     public  static final int SUCCESS                               = AudioSystem.SUCCESS;
     91     /**
     92      * Denotes a generic operation failure.
     93      */
     94     public  static final int ERROR                                 = AudioSystem.ERROR;
     95     /**
     96      * Denotes a failure due to the use of an invalid value.
     97      */
     98     public  static final int ERROR_BAD_VALUE                       = AudioSystem.BAD_VALUE;
     99     /**
    100      * Denotes a failure due to the improper use of a method.
    101      */
    102     public  static final int ERROR_INVALID_OPERATION               = AudioSystem.INVALID_OPERATION;
    103     /**
    104      * An error code indicating that the object reporting it is no longer valid and needs to
    105      * be recreated.
    106      */
    107     public  static final int ERROR_DEAD_OBJECT                     = AudioSystem.DEAD_OBJECT;
    108 
    109     // Error codes:
    110     // to keep in sync with frameworks/base/core/jni/android_media_AudioRecord.cpp
    111     private static final int AUDIORECORD_ERROR_SETUP_ZEROFRAMECOUNT      = -16;
    112     private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELMASK  = -17;
    113     private static final int AUDIORECORD_ERROR_SETUP_INVALIDFORMAT       = -18;
    114     private static final int AUDIORECORD_ERROR_SETUP_INVALIDSOURCE       = -19;
    115     private static final int AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED    = -20;
    116 
    117     // Events:
    118     // to keep in sync with frameworks/av/include/media/AudioRecord.h
    119     /**
     120      * Event id denoting that the record head has reached a previously set marker.
    121      */
    122     private static final int NATIVE_EVENT_MARKER  = 2;
    123     /**
     124      * Event id denoting that the previously set update period has elapsed during recording.
    125      */
    126     private static final int NATIVE_EVENT_NEW_POS = 3;
    127 
    128     private final static String TAG = "android.media.AudioRecord";
    129 
    130     /** @hide */
    131     public final static String SUBMIX_FIXED_VOLUME = "fixedVolume";
    132 
    133     /** @hide */
    134     @IntDef({
    135         READ_BLOCKING,
    136         READ_NON_BLOCKING
    137     })
    138     @Retention(RetentionPolicy.SOURCE)
    139     public @interface ReadMode {}
    140 
    141     /**
    142      * The read mode indicating the read operation will block until all data
    143      * requested has been read.
    144      */
    145     public final static int READ_BLOCKING = 0;
    146 
    147     /**
    148      * The read mode indicating the read operation will return immediately after
    149      * reading as much audio data as possible without blocking.
    150      */
    151     public final static int READ_NON_BLOCKING = 1;
    152 
    153     //---------------------------------------------------------
    154     // Used exclusively by native code
    155     //--------------------
    156     /**
    157      * Accessed by native methods: provides access to C++ AudioRecord object
    158      */
    159     @SuppressWarnings("unused")
    160     private long mNativeRecorderInJavaObj;
    161 
    162     /**
    163      * Accessed by native methods: provides access to the callback data.
    164      */
    165     @SuppressWarnings("unused")
    166     private long mNativeCallbackCookie;
    167 
    168     /**
    169      * Accessed by native methods: provides access to the JNIDeviceCallback instance.
    170      */
    171     @SuppressWarnings("unused")
    172     private long mNativeDeviceCallback;
    173 
    174 
    175     //---------------------------------------------------------
    176     // Member variables
    177     //--------------------
    178     /**
    179      * The audio data sampling rate in Hz.
    180      * Never {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED}.
    181      */
    182     private int mSampleRate; // initialized by all constructors via audioParamCheck()
    183     /**
    184      * The number of input audio channels (1 is mono, 2 is stereo)
    185      */
    186     private int mChannelCount;
    187     /**
    188      * The audio channel position mask
    189      */
    190     private int mChannelMask;
    191     /**
    192      * The audio channel index mask
    193      */
    194     private int mChannelIndexMask;
    195     /**
    196      * The encoding of the audio samples.
    197      * @see AudioFormat#ENCODING_PCM_8BIT
    198      * @see AudioFormat#ENCODING_PCM_16BIT
    199      * @see AudioFormat#ENCODING_PCM_FLOAT
    200      */
    201     private int mAudioFormat;
    202     /**
    203      * Where the audio data is recorded from.
    204      */
    205     private int mRecordSource;
    206     /**
    207      * Indicates the state of the AudioRecord instance.
    208      */
    209     private int mState = STATE_UNINITIALIZED;
    210     /**
    211      * Indicates the recording state of the AudioRecord instance.
    212      */
    213     private int mRecordingState = RECORDSTATE_STOPPED;
    214     /**
     215      * Lock to ensure mRecordingState updates reflect the actual state of the object.
    216      */
    217     private final Object mRecordingStateLock = new Object();
    218     /**
    219      * The listener the AudioRecord notifies when the record position reaches a marker
    220      * or for periodic updates during the progression of the record head.
    221      *  @see #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)
    222      *  @see #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler)
    223      */
    224     private OnRecordPositionUpdateListener mPositionListener = null;
    225     /**
    226      * Lock to protect position listener updates against event notifications
    227      */
    228     private final Object mPositionListenerLock = new Object();
    229     /**
    230      * Handler for marker events coming from the native code
    231      */
    232     private NativeEventHandler mEventHandler = null;
    233     /**
    234      * Looper associated with the thread that creates the AudioRecord instance
    235      */
    236     private Looper mInitializationLooper = null;
    237     /**
    238      * Size of the native audio buffer.
    239      */
    240     private int mNativeBufferSizeInBytes = 0;
    241     /**
    242      * Audio session ID
    243      */
    244     private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
    245     /**
    246      * AudioAttributes
    247      */
    248     private AudioAttributes mAudioAttributes;
    249     private boolean mIsSubmixFullVolume = false;
    250 
    251     //---------------------------------------------------------
    252     // Constructor, Finalize
    253     //--------------------
    254     /**
    255      * Class constructor.
    256      * Though some invalid parameters will result in an {@link IllegalArgumentException} exception,
    257      * other errors do not.  Thus you should call {@link #getState()} immediately after construction
    258      * to confirm that the object is usable.
    259      * @param audioSource the recording source.
    260      *   See {@link MediaRecorder.AudioSource} for the recording source definitions.
    261      * @param sampleRateInHz the sample rate expressed in Hertz. 44100Hz is currently the only
    262      *   rate that is guaranteed to work on all devices, but other rates such as 22050,
    263      *   16000, and 11025 may work on some devices.
    264      *   {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} means to use a route-dependent value
    265      *   which is usually the sample rate of the source.
    266      *   {@link #getSampleRate()} can be used to retrieve the actual sample rate chosen.
    267      * @param channelConfig describes the configuration of the audio channels.
    268      *   See {@link AudioFormat#CHANNEL_IN_MONO} and
    269      *   {@link AudioFormat#CHANNEL_IN_STEREO}.  {@link AudioFormat#CHANNEL_IN_MONO} is guaranteed
    270      *   to work on all devices.
    271      * @param audioFormat the format in which the audio data is to be returned.
    272      *   See {@link AudioFormat#ENCODING_PCM_8BIT}, {@link AudioFormat#ENCODING_PCM_16BIT},
    273      *   and {@link AudioFormat#ENCODING_PCM_FLOAT}.
    274      * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written
    275      *   to during the recording. New audio data can be read from this buffer in smaller chunks
    276      *   than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
    277      *   required buffer size for the successful creation of an AudioRecord instance. Using values
    278      *   smaller than getMinBufferSize() will result in an initialization failure.
    279      * @throws java.lang.IllegalArgumentException
    280      */
    281     public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat,
    282             int bufferSizeInBytes)
    283     throws IllegalArgumentException {
    284         this((new AudioAttributes.Builder())
    285                     .setInternalCapturePreset(audioSource)
    286                     .build(),
    287                 (new AudioFormat.Builder())
    288                     .setChannelMask(getChannelMaskFromLegacyConfig(channelConfig,
    289                                         true/*allow legacy configurations*/))
    290                     .setEncoding(audioFormat)
    291                     .setSampleRate(sampleRateInHz)
    292                     .build(),
    293                 bufferSizeInBytes,
    294                 AudioManager.AUDIO_SESSION_ID_GENERATE);
    295     }
    296 
    297     /**
    298      * @hide
    299      * Class constructor with {@link AudioAttributes} and {@link AudioFormat}.
    300      * @param attributes a non-null {@link AudioAttributes} instance. Use
    301      *     {@link AudioAttributes.Builder#setAudioSource(int)} for configuring the audio
    302      *     source for this instance.
    303      * @param format a non-null {@link AudioFormat} instance describing the format of the data
    304      *     that will be recorded through this AudioRecord. See {@link AudioFormat.Builder} for
    305      *     configuring the audio format parameters such as encoding, channel mask and sample rate.
    306      * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written
    307      *   to during the recording. New audio data can be read from this buffer in smaller chunks
    308      *   than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
    309      *   required buffer size for the successful creation of an AudioRecord instance. Using values
    310      *   smaller than getMinBufferSize() will result in an initialization failure.
    311      * @param sessionId ID of audio session the AudioRecord must be attached to, or
    312      *   {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at construction
    313      *   time. See also {@link AudioManager#generateAudioSessionId()} to obtain a session ID before
    314      *   construction.
    315      * @throws IllegalArgumentException
    316      */
    317     @SystemApi
    318     public AudioRecord(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes,
    319             int sessionId) throws IllegalArgumentException {
    320         mRecordingState = RECORDSTATE_STOPPED;
    321 
    322         if (attributes == null) {
    323             throw new IllegalArgumentException("Illegal null AudioAttributes");
    324         }
    325         if (format == null) {
    326             throw new IllegalArgumentException("Illegal null AudioFormat");
    327         }
    328 
     329         // remember which looper is associated with the AudioRecord instantiation
    330         if ((mInitializationLooper = Looper.myLooper()) == null) {
    331             mInitializationLooper = Looper.getMainLooper();
    332         }
    333 
    334         // is this AudioRecord using REMOTE_SUBMIX at full volume?
    335         if (attributes.getCapturePreset() == MediaRecorder.AudioSource.REMOTE_SUBMIX) {
    336             final AudioAttributes.Builder filteredAttr = new AudioAttributes.Builder();
    337             final Iterator<String> tagsIter = attributes.getTags().iterator();
    338             while (tagsIter.hasNext()) {
    339                 final String tag = tagsIter.next();
    340                 if (tag.equalsIgnoreCase(SUBMIX_FIXED_VOLUME)) {
    341                     mIsSubmixFullVolume = true;
    342                     Log.v(TAG, "Will record from REMOTE_SUBMIX at full fixed volume");
     343                 } else { // SUBMIX_FIXED_VOLUME is not to be propagated to the native layers
    344                     filteredAttr.addTag(tag);
    345                 }
    346             }
    347             filteredAttr.setInternalCapturePreset(attributes.getCapturePreset());
    348             mAudioAttributes = filteredAttr.build();
    349         } else {
    350             mAudioAttributes = attributes;
    351         }
    352 
    353         int rate = format.getSampleRate();
    354         if (rate == AudioFormat.SAMPLE_RATE_UNSPECIFIED) {
    355             rate = 0;
    356         }
    357 
    358         int encoding = AudioFormat.ENCODING_DEFAULT;
    359         if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0)
    360         {
    361             encoding = format.getEncoding();
    362         }
    363 
    364         audioParamCheck(attributes.getCapturePreset(), rate, encoding);
    365 
    366         if ((format.getPropertySetMask()
    367                 & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
    368             mChannelIndexMask = format.getChannelIndexMask();
    369             mChannelCount = format.getChannelCount();
    370         }
    371         if ((format.getPropertySetMask()
    372                 & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
    373             mChannelMask = getChannelMaskFromLegacyConfig(format.getChannelMask(), false);
    374             mChannelCount = format.getChannelCount();
    375         } else if (mChannelIndexMask == 0) {
    376             mChannelMask = getChannelMaskFromLegacyConfig(AudioFormat.CHANNEL_IN_DEFAULT, false);
    377             mChannelCount =  AudioFormat.channelCountFromInChannelMask(mChannelMask);
    378         }
    379 
    380         audioBuffSizeCheck(bufferSizeInBytes);
    381 
    382         int[] sampleRate = new int[] {mSampleRate};
    383         int[] session = new int[1];
    384         session[0] = sessionId;
    385         //TODO: update native initialization when information about hardware init failure
    386         //      due to capture device already open is available.
    387         int initResult = native_setup( new WeakReference<AudioRecord>(this),
    388                 mAudioAttributes, sampleRate, mChannelMask, mChannelIndexMask,
    389                 mAudioFormat, mNativeBufferSizeInBytes,
    390                 session, ActivityThread.currentOpPackageName(), 0 /*nativeRecordInJavaObj*/);
    391         if (initResult != SUCCESS) {
    392             loge("Error code "+initResult+" when initializing native AudioRecord object.");
    393             return; // with mState == STATE_UNINITIALIZED
    394         }
    395 
    396         mSampleRate = sampleRate[0];
    397         mSessionId = session[0];
    398 
    399         mState = STATE_INITIALIZED;
    400     }
    401 
    402     /**
    403      * A constructor which explicitly connects a Native (C++) AudioRecord. For use by
    404      * the AudioRecordRoutingProxy subclass.
    405      * @param nativeRecordInJavaObj A C/C++ pointer to a native AudioRecord
    406      * (associated with an OpenSL ES recorder). Note: the caller must ensure a correct
    407      * value here as no error checking is or can be done.
    408      */
    409     /*package*/ AudioRecord(long nativeRecordInJavaObj) {
    410         mNativeRecorderInJavaObj = 0;
    411         mNativeCallbackCookie = 0;
    412         mNativeDeviceCallback = 0;
    413 
    414         // other initialization...
    415         if (nativeRecordInJavaObj != 0) {
    416             deferred_connect(nativeRecordInJavaObj);
    417         } else {
    418             mState = STATE_UNINITIALIZED;
    419         }
    420     }
    421 
    422     /**
    423      * @hide
    424      */
     425     /* package */ void deferred_connect(long nativeRecordInJavaObj) {
    426         if (mState != STATE_INITIALIZED) {
    427             int[] session = { 0 };
    428             int[] rates = { 0 };
    429             //TODO: update native initialization when information about hardware init failure
    430             //      due to capture device already open is available.
    431             // Note that for this native_setup, we are providing an already created/initialized
    432             // *Native* AudioRecord, so the attributes parameters to native_setup() are ignored.
    433             int initResult = native_setup(new WeakReference<AudioRecord>(this),
    434                     null /*mAudioAttributes*/,
    435                     rates /*mSampleRates*/,
    436                     0 /*mChannelMask*/,
    437                     0 /*mChannelIndexMask*/,
    438                     0 /*mAudioFormat*/,
    439                     0 /*mNativeBufferSizeInBytes*/,
    440                     session,
    441                     ActivityThread.currentOpPackageName(),
    442                     nativeRecordInJavaObj);
    443             if (initResult != SUCCESS) {
    444                 loge("Error code "+initResult+" when initializing native AudioRecord object.");
    445                 return; // with mState == STATE_UNINITIALIZED
    446             }
    447 
    448             mSessionId = session[0];
    449 
    450             mState = STATE_INITIALIZED;
    451         }
    452     }
    453 
    454     /**
    455      * Builder class for {@link AudioRecord} objects.
    456      * Use this class to configure and create an <code>AudioRecord</code> instance. By setting the
    457      * recording source and audio format parameters, you indicate which of
    458      * those vary from the default behavior on the device.
    459      * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat}
    460      * parameters, to be used by a new <code>AudioRecord</code> instance:
    461      *
    462      * <pre class="prettyprint">
    463      * AudioRecord recorder = new AudioRecord.Builder()
    464      *         .setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION)
    465      *         .setAudioFormat(new AudioFormat.Builder()
    466      *                 .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
    467      *                 .setSampleRate(32000)
    468      *                 .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
    469      *                 .build())
    470      *         .setBufferSize(2*minBuffSize)
    471      *         .build();
    472      * </pre>
    473      * <p>
    474      * If the audio source is not set with {@link #setAudioSource(int)},
    475      * {@link MediaRecorder.AudioSource#DEFAULT} is used.
    476      * <br>If the audio format is not specified or is incomplete, its channel configuration will be
    477      * {@link AudioFormat#CHANNEL_IN_MONO}, and the encoding will be
    478      * {@link AudioFormat#ENCODING_PCM_16BIT}.
    479      * The sample rate will depend on the device actually selected for capture and can be queried
     480      * with the {@link #getSampleRate()} method.
    481      * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)},
    482      * the minimum buffer size for the source is used.
    483      */
    484     public static class Builder {
    485         private AudioAttributes mAttributes;
    486         private AudioFormat mFormat;
    487         private int mBufferSizeInBytes;
    488         private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
    489 
    490         /**
    491          * Constructs a new Builder with the default values as described above.
    492          */
    493         public Builder() {
    494         }
    495 
    496         /**
    497          * @param source the audio source.
    498          * See {@link MediaRecorder.AudioSource} for the supported audio source definitions.
    499          * @return the same Builder instance.
    500          * @throws IllegalArgumentException
    501          */
    502         public Builder setAudioSource(int source) throws IllegalArgumentException {
    503             if ( (source < MediaRecorder.AudioSource.DEFAULT) ||
    504                     (source > MediaRecorder.getAudioSourceMax()) ) {
    505                 throw new IllegalArgumentException("Invalid audio source " + source);
    506             }
    507             mAttributes = new AudioAttributes.Builder()
    508                     .setInternalCapturePreset(source)
    509                     .build();
    510             return this;
    511         }
    512 
    513         /**
    514          * @hide
     515          * To be used only by system components. Allows specifying non-public capture presets.
    516          * @param attributes a non-null {@link AudioAttributes} instance that contains the capture
    517          *     preset to be used.
    518          * @return the same Builder instance.
    519          * @throws IllegalArgumentException
    520          */
    521         @SystemApi
    522         public Builder setAudioAttributes(@NonNull AudioAttributes attributes)
    523                 throws IllegalArgumentException {
    524             if (attributes == null) {
    525                 throw new IllegalArgumentException("Illegal null AudioAttributes argument");
    526             }
    527             if (attributes.getCapturePreset() == MediaRecorder.AudioSource.AUDIO_SOURCE_INVALID) {
    528                 throw new IllegalArgumentException(
    529                         "No valid capture preset in AudioAttributes argument");
    530             }
    531             // keep reference, we only copy the data when building
    532             mAttributes = attributes;
    533             return this;
    534         }
    535 
    536         /**
    537          * Sets the format of the audio data to be captured.
    538          * @param format a non-null {@link AudioFormat} instance
    539          * @return the same Builder instance.
    540          * @throws IllegalArgumentException
    541          */
    542         public Builder setAudioFormat(@NonNull AudioFormat format) throws IllegalArgumentException {
    543             if (format == null) {
    544                 throw new IllegalArgumentException("Illegal null AudioFormat argument");
    545             }
    546             // keep reference, we only copy the data when building
    547             mFormat = format;
    548             return this;
    549         }
    550 
    551         /**
    552          * Sets the total size (in bytes) of the buffer where audio data is written
    553          * during the recording. New audio data can be read from this buffer in smaller chunks
    554          * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
    555          * required buffer size for the successful creation of an AudioRecord instance.
    556          * Since bufferSizeInBytes may be internally increased to accommodate the source
    557          * requirements, use {@link #getBufferSizeInFrames()} to determine the actual buffer size
    558          * in frames.
    559          * @param bufferSizeInBytes a value strictly greater than 0
    560          * @return the same Builder instance.
    561          * @throws IllegalArgumentException
    562          */
    563         public Builder setBufferSizeInBytes(int bufferSizeInBytes) throws IllegalArgumentException {
    564             if (bufferSizeInBytes <= 0) {
    565                 throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
    566             }
    567             mBufferSizeInBytes = bufferSizeInBytes;
    568             return this;
    569         }
    570 
    571         /**
    572          * @hide
     573          * To be used only by system components.
    574          * @param sessionId ID of audio session the AudioRecord must be attached to, or
    575          *     {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at
    576          *     construction time.
    577          * @return the same Builder instance.
    578          * @throws IllegalArgumentException
    579          */
    580         @SystemApi
    581         public Builder setSessionId(int sessionId) throws IllegalArgumentException {
    582             if (sessionId < 0) {
    583                 throw new IllegalArgumentException("Invalid session ID " + sessionId);
    584             }
    585             mSessionId = sessionId;
    586             return this;
    587         }
    588 
    589         /**
    590          * @return a new {@link AudioRecord} instance successfully initialized with all
    591          *     the parameters set on this <code>Builder</code>.
    592          * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
    593          *     were incompatible, or if they are not supported by the device,
    594          *     or if the device was not available.
    595          */
    596         public AudioRecord build() throws UnsupportedOperationException {
    597             if (mFormat == null) {
    598                 mFormat = new AudioFormat.Builder()
    599                         .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
    600                         .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
    601                         .build();
    602             } else {
    603                 if (mFormat.getEncoding() == AudioFormat.ENCODING_INVALID) {
    604                     mFormat = new AudioFormat.Builder(mFormat)
    605                             .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
    606                             .build();
    607                 }
    608                 if (mFormat.getChannelMask() == AudioFormat.CHANNEL_INVALID
    609                         && mFormat.getChannelIndexMask() == AudioFormat.CHANNEL_INVALID) {
    610                     mFormat = new AudioFormat.Builder(mFormat)
    611                             .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
    612                             .build();
    613                 }
    614             }
    615             if (mAttributes == null) {
    616                 mAttributes = new AudioAttributes.Builder()
    617                         .setInternalCapturePreset(MediaRecorder.AudioSource.DEFAULT)
    618                         .build();
    619             }
    620             try {
    621                 // If the buffer size is not specified,
    622                 // use a single frame for the buffer size and let the
    623                 // native code figure out the minimum buffer size.
    624                 if (mBufferSizeInBytes == 0) {
    625                     mBufferSizeInBytes = mFormat.getChannelCount()
    626                             * mFormat.getBytesPerSample(mFormat.getEncoding());
    627                 }
    628                 final AudioRecord record = new AudioRecord(
    629                         mAttributes, mFormat, mBufferSizeInBytes, mSessionId);
    630                 if (record.getState() == STATE_UNINITIALIZED) {
    631                     // release is not necessary
    632                     throw new UnsupportedOperationException("Cannot create AudioRecord");
    633                 }
    634                 return record;
    635             } catch (IllegalArgumentException e) {
    636                 throw new UnsupportedOperationException(e.getMessage());
    637             }
    638         }
    639     }
    640 
    641     // Convenience method for the constructor's parameter checks.
    642     // This, getChannelMaskFromLegacyConfig and audioBuffSizeCheck are where constructor
     643     // IllegalArgumentExceptions are thrown
    644     private static int getChannelMaskFromLegacyConfig(int inChannelConfig,
    645             boolean allowLegacyConfig) {
    646         int mask;
    647         switch (inChannelConfig) {
    648         case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
    649         case AudioFormat.CHANNEL_IN_MONO:
    650         case AudioFormat.CHANNEL_CONFIGURATION_MONO:
    651             mask = AudioFormat.CHANNEL_IN_MONO;
    652             break;
    653         case AudioFormat.CHANNEL_IN_STEREO:
    654         case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
    655             mask = AudioFormat.CHANNEL_IN_STEREO;
    656             break;
    657         case (AudioFormat.CHANNEL_IN_FRONT | AudioFormat.CHANNEL_IN_BACK):
    658             mask = inChannelConfig;
    659             break;
    660         default:
    661             throw new IllegalArgumentException("Unsupported channel configuration.");
    662         }
    663 
    664         if (!allowLegacyConfig && ((inChannelConfig == AudioFormat.CHANNEL_CONFIGURATION_MONO)
    665                 || (inChannelConfig == AudioFormat.CHANNEL_CONFIGURATION_STEREO))) {
    666             // only happens with the constructor that uses AudioAttributes and AudioFormat
    667             throw new IllegalArgumentException("Unsupported deprecated configuration.");
    668         }
    669 
    670         return mask;
    671     }
    672 
    673     // postconditions:
    674     //    mRecordSource is valid
    675     //    mAudioFormat is valid
    676     //    mSampleRate is valid
    677     private void audioParamCheck(int audioSource, int sampleRateInHz, int audioFormat)
    678             throws IllegalArgumentException {
    679 
    680         //--------------
    681         // audio source
    682         if ( (audioSource < MediaRecorder.AudioSource.DEFAULT) ||
    683              ((audioSource > MediaRecorder.getAudioSourceMax()) &&
    684               (audioSource != MediaRecorder.AudioSource.RADIO_TUNER) &&
    685               (audioSource != MediaRecorder.AudioSource.HOTWORD)) )  {
    686             throw new IllegalArgumentException("Invalid audio source " + audioSource);
    687         }
    688         mRecordSource = audioSource;
    689 
    690         //--------------
    691         // sample rate
    692         if ((sampleRateInHz < AudioFormat.SAMPLE_RATE_HZ_MIN ||
    693                 sampleRateInHz > AudioFormat.SAMPLE_RATE_HZ_MAX) &&
    694                 sampleRateInHz != AudioFormat.SAMPLE_RATE_UNSPECIFIED) {
    695             throw new IllegalArgumentException(sampleRateInHz
    696                     + "Hz is not a supported sample rate.");
    697         }
    698         mSampleRate = sampleRateInHz;
    699 
    700         //--------------
    701         // audio format
    702         switch (audioFormat) {
    703         case AudioFormat.ENCODING_DEFAULT:
    704             mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
    705             break;
    706         case AudioFormat.ENCODING_PCM_FLOAT:
    707         case AudioFormat.ENCODING_PCM_16BIT:
    708         case AudioFormat.ENCODING_PCM_8BIT:
    709             mAudioFormat = audioFormat;
    710             break;
    711         default:
    712             throw new IllegalArgumentException("Unsupported sample encoding " + audioFormat
    713                     + ". Should be ENCODING_PCM_8BIT, ENCODING_PCM_16BIT, or ENCODING_PCM_FLOAT.");
    714         }
    715     }
    716 
    717 
     718     // Convenience method for the constructor's audio buffer size check.
    719     // preconditions:
    720     //    mChannelCount is valid
    721     //    mAudioFormat is AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT,
    722     //                 or AudioFormat.ENCODING_PCM_FLOAT
    723     // postcondition:
    724     //    mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
    725     private void audioBuffSizeCheck(int audioBufferSize) throws IllegalArgumentException {
    726         // NB: this section is only valid with PCM data.
     727         // To be updated when compressed formats are supported.
    728         int frameSizeInBytes = mChannelCount
    729             * (AudioFormat.getBytesPerSample(mAudioFormat));
    730         if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
    731             throw new IllegalArgumentException("Invalid audio buffer size " + audioBufferSize
    732                     + " (frame size " + frameSizeInBytes + ")");
    733         }
    734 
    735         mNativeBufferSizeInBytes = audioBufferSize;
    736     }
    737 
    738 
    739 
    740     /**
    741      * Releases the native AudioRecord resources.
    742      * The object can no longer be used and the reference should be set to null
     743      * after a call to release().
    744      */
    745     public void release() {
    746         try {
    747             stop();
    748         } catch(IllegalStateException ise) {
    749             // don't raise an exception, we're releasing the resources.
    750         }
    751         native_release();
    752         mState = STATE_UNINITIALIZED;
    753     }
    754 
    755 
    756     @Override
    757     protected void finalize() {
    758         // will cause stop() to be called, and if appropriate, will handle fixed volume recording
    759         release();
    760     }
    761 
    762 
    763     //--------------------------------------------------------------------------
    764     // Getters
    765     //--------------------
    766     /**
    767      * Returns the configured audio sink sample rate in Hz.
    768      * The sink sample rate never changes after construction.
    769      * If the constructor had a specific sample rate, then the sink sample rate is that value.
    770      * If the constructor had {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED},
     771      * then the sink sample rate is a route-dependent default value based on the source.
    772      */
    773     public int getSampleRate() {
    774         return mSampleRate;
    775     }
    776 
    777     /**
    778      * Returns the audio recording source.
    779      * @see MediaRecorder.AudioSource
    780      */
    781     public int getAudioSource() {
    782         return mRecordSource;
    783     }
    784 
    785     /**
    786      * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT},
    787      * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}.
    788      */
    789     public int getAudioFormat() {
    790         return mAudioFormat;
    791     }
    792 
    793     /**
    794      * Returns the configured channel position mask.
    795      * <p> See {@link AudioFormat#CHANNEL_IN_MONO}
    796      * and {@link AudioFormat#CHANNEL_IN_STEREO}.
    797      * This method may return {@link AudioFormat#CHANNEL_INVALID} if
    798      * a channel index mask is used.
    799      * Consider {@link #getFormat()} instead, to obtain an {@link AudioFormat},
    800      * which contains both the channel position mask and the channel index mask.
    801      */
    802     public int getChannelConfiguration() {
    803         return mChannelMask;
    804     }
    805 
    806     /**
    807      * Returns the configured <code>AudioRecord</code> format.
    808      * @return an {@link AudioFormat} containing the
    809      * <code>AudioRecord</code> parameters at the time of configuration.
    810      */
    811     public @NonNull AudioFormat getFormat() {
    812         AudioFormat.Builder builder = new AudioFormat.Builder()
    813             .setSampleRate(mSampleRate)
    814             .setEncoding(mAudioFormat);
    815         if (mChannelMask != AudioFormat.CHANNEL_INVALID) {
    816             builder.setChannelMask(mChannelMask);
    817         }
    818         if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID  /* 0 */) {
    819             builder.setChannelIndexMask(mChannelIndexMask);
    820         }
    821         return builder.build();
    822     }
    823 
    824     /**
    825      * Returns the configured number of channels.
    826      */
    827     public int getChannelCount() {
    828         return mChannelCount;
    829     }
    830 
    831     /**
    832      * Returns the state of the AudioRecord instance. This is useful after the
    833      * AudioRecord instance has been created to check if it was initialized
    834      * properly. This ensures that the appropriate hardware resources have been
    835      * acquired.
    836      * @see AudioRecord#STATE_INITIALIZED
    837      * @see AudioRecord#STATE_UNINITIALIZED
    838      */
    839     public int getState() {
    840         return mState;
    841     }
    842 
    843     /**
    844      * Returns the recording state of the AudioRecord instance.
    845      * @see AudioRecord#RECORDSTATE_STOPPED
    846      * @see AudioRecord#RECORDSTATE_RECORDING
    847      */
    848     public int getRecordingState() {
    849         synchronized (mRecordingStateLock) {
    850             return mRecordingState;
    851         }
    852     }
    853 
    854     /**
    855      *  Returns the frame count of the native <code>AudioRecord</code> buffer.
     856      *  This is greater than or equal to the bufferSizeInBytes specified in the
     857      *  <code>AudioRecord</code> constructor or Builder, converted to frame units.
    858      *  The native frame count may be enlarged to accommodate the requirements of the
    859      *  source on creation or if the <code>AudioRecord</code>
    860      *  is subsequently rerouted.
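     *  <p>A minimal sketch of converting the frame count back to a byte count, assuming a
     *  hypothetical <code>recorder</code> configured for 16-bit PCM (2 bytes per sample):
     *  <pre class="prettyprint">
     *  int frameSizeInBytes = recorder.getChannelCount() * 2; // 2 bytes per 16-bit sample
     *  int bufferSizeInBytes = recorder.getBufferSizeInFrames() * frameSizeInBytes;
     *  </pre>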
    861      *  @return current size in frames of the <code>AudioRecord</code> buffer.
    862      *  @throws IllegalStateException
    863      */
    864     public int getBufferSizeInFrames() {
    865         return native_get_buffer_size_in_frames();
    866     }
    867 
    868     /**
    869      * Returns the notification marker position expressed in frames.
    870      */
    871     public int getNotificationMarkerPosition() {
    872         return native_get_marker_pos();
    873     }
    874 
    875     /**
    876      * Returns the notification update period expressed in frames.
    877      */
    878     public int getPositionNotificationPeriod() {
    879         return native_get_pos_update_period();
    880     }
    881 
    882     /**
    883      * Poll for an {@link AudioTimestamp} on demand.
    884      * <p>
    885      * The AudioTimestamp reflects the frame delivery information at
    886      * the earliest point available in the capture pipeline.
    887      * <p>
    888      * Calling {@link #startRecording()} following a {@link #stop()} will reset
    889      * the frame count to 0.
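     * <p>A minimal polling sketch, assuming a hypothetical <code>recorder</code> instance that
     * is already recording:
     * <pre class="prettyprint">
     * AudioTimestamp timestamp = new AudioTimestamp();
     * if (recorder.getTimestamp(timestamp, AudioTimestamp.TIMEBASE_MONOTONIC)
     *         == AudioRecord.SUCCESS) {
     *     // timestamp.framePosition and timestamp.nanoTime describe the same instant
     *     long framePosition = timestamp.framePosition;
     *     long nanoTime = timestamp.nanoTime;
     * }
     * </pre>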
    890      *
    891      * @param outTimestamp a caller provided non-null AudioTimestamp instance,
    892      *        which is updated with the AudioRecord frame delivery information upon success.
    893      * @param timebase one of
    894      *        {@link AudioTimestamp#TIMEBASE_BOOTTIME AudioTimestamp.TIMEBASE_BOOTTIME} or
    895      *        {@link AudioTimestamp#TIMEBASE_MONOTONIC AudioTimestamp.TIMEBASE_MONOTONIC},
    896      *        used to select the clock for the AudioTimestamp time.
    897      * @return {@link #SUCCESS} if a timestamp is available,
     898      *         or {@link #ERROR_INVALID_OPERATION} if a timestamp is not available.
    899      */
    900      public int getTimestamp(@NonNull AudioTimestamp outTimestamp,
    901              @AudioTimestamp.Timebase int timebase)
    902      {
    903          if (outTimestamp == null ||
    904                  (timebase != AudioTimestamp.TIMEBASE_BOOTTIME
    905                  && timebase != AudioTimestamp.TIMEBASE_MONOTONIC)) {
    906              throw new IllegalArgumentException();
    907          }
    908          return native_get_timestamp(outTimestamp, timebase);
    909      }
    910 
    911     /**
    912      * Returns the minimum buffer size required for the successful creation of an AudioRecord
    913      * object, in byte units.
    914      * Note that this size doesn't guarantee a smooth recording under load, and higher values
    915      * should be chosen according to the expected frequency at which the AudioRecord instance
    916      * will be polled for new data.
    917      * See {@link #AudioRecord(int, int, int, int, int)} for more information on valid
    918      * configuration values.
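     * <p>A minimal sketch of checking the returned value before using it (the 44100 Hz,
     * mono, 16-bit configuration is illustrative):
     * <pre class="prettyprint">
     * int minBufSize = AudioRecord.getMinBufferSize(44100,
     *         AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
     * if (minBufSize == AudioRecord.ERROR_BAD_VALUE || minBufSize == AudioRecord.ERROR) {
     *     // the configuration is not supported, or the hardware could not be queried
     * } else {
     *     // allocate a recording buffer of at least minBufSize bytes, e.g. 2 * minBufSize
     * }
     * </pre>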
    919      * @param sampleRateInHz the sample rate expressed in Hertz.
    920      *   {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} is not permitted.
    921      * @param channelConfig describes the configuration of the audio channels.
    922      *   See {@link AudioFormat#CHANNEL_IN_MONO} and
    923      *   {@link AudioFormat#CHANNEL_IN_STEREO}
    924      * @param audioFormat the format in which the audio data is represented.
    925      *   See {@link AudioFormat#ENCODING_PCM_16BIT}.
     926      * @return the minimum buffer size expressed in bytes,
     927      *  or {@link #ERROR_BAD_VALUE} if the recording parameters are not supported by the
     928      *  hardware or an invalid parameter was passed,
     929      *  or {@link #ERROR} if the implementation was unable to query the hardware for its
     930      *  input properties.
    931      * @see #AudioRecord(int, int, int, int, int)
    932      */
    933     static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
    934         int channelCount = 0;
    935         switch (channelConfig) {
    936         case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
    937         case AudioFormat.CHANNEL_IN_MONO:
    938         case AudioFormat.CHANNEL_CONFIGURATION_MONO:
    939             channelCount = 1;
    940             break;
    941         case AudioFormat.CHANNEL_IN_STEREO:
    942         case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
    943         case (AudioFormat.CHANNEL_IN_FRONT | AudioFormat.CHANNEL_IN_BACK):
    944             channelCount = 2;
    945             break;
    946         case AudioFormat.CHANNEL_INVALID:
    947         default:
    948             loge("getMinBufferSize(): Invalid channel configuration.");
    949             return ERROR_BAD_VALUE;
    950         }
    951 
    952         int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
    953         if (size == 0) {
    954             return ERROR_BAD_VALUE;
    955         }
    956         else if (size == -1) {
    957             return ERROR;
    958         }
    959         else {
    960             return size;
    961         }
    962     }
    963 
    964     /**
    965      * Returns the audio session ID.
    966      *
    967      * @return the ID of the audio session this AudioRecord belongs to.
    968      */
    969     public int getAudioSessionId() {
    970         return mSessionId;
    971     }
    972 
    973     //---------------------------------------------------------
    974     // Transport control methods
    975     //--------------------
    976     /**
    977      * Starts recording from the AudioRecord instance.
    978      * @throws IllegalStateException
    979      */
    980     public void startRecording()
    981     throws IllegalStateException {
    982         if (mState != STATE_INITIALIZED) {
    983             throw new IllegalStateException("startRecording() called on an "
    984                     + "uninitialized AudioRecord.");
    985         }
    986 
    987         // start recording
    988         synchronized(mRecordingStateLock) {
    989             if (native_start(MediaSyncEvent.SYNC_EVENT_NONE, 0) == SUCCESS) {
    990                 handleFullVolumeRec(true);
    991                 mRecordingState = RECORDSTATE_RECORDING;
    992             }
    993         }
    994     }
    995 
    996     /**
    997      * Starts recording from the AudioRecord instance when the specified synchronization event
    998      * occurs on the specified audio session.
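     * <p>A minimal sketch, assuming a hypothetical <code>playbackSessionId</code> of an audio
     * session whose presentation completion should trigger the capture:
     * <pre class="prettyprint">
     * MediaSyncEvent event = MediaSyncEvent.createEvent(
     *                 MediaSyncEvent.SYNC_EVENT_PRESENTATION_COMPLETE)
     *         .setAudioSessionId(playbackSessionId);
     * recorder.startRecording(event);
     * </pre>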
    999      * @throws IllegalStateException
   1000      * @param syncEvent event that triggers the capture.
   1001      * @see MediaSyncEvent
   1002      */
   1003     public void startRecording(MediaSyncEvent syncEvent)
   1004     throws IllegalStateException {
   1005         if (mState != STATE_INITIALIZED) {
   1006             throw new IllegalStateException("startRecording() called on an "
   1007                     + "uninitialized AudioRecord.");
   1008         }
   1009 
   1010         // start recording
   1011         synchronized(mRecordingStateLock) {
   1012             if (native_start(syncEvent.getType(), syncEvent.getAudioSessionId()) == SUCCESS) {
   1013                 handleFullVolumeRec(true);
   1014                 mRecordingState = RECORDSTATE_RECORDING;
   1015             }
   1016         }
   1017     }
   1018 
   1019     /**
   1020      * Stops recording.
   1021      * @throws IllegalStateException
   1022      */
   1023     public void stop()
   1024     throws IllegalStateException {
   1025         if (mState != STATE_INITIALIZED) {
   1026             throw new IllegalStateException("stop() called on an uninitialized AudioRecord.");
   1027         }
   1028 
   1029         // stop recording
   1030         synchronized(mRecordingStateLock) {
   1031             handleFullVolumeRec(false);
   1032             native_stop();
   1033             mRecordingState = RECORDSTATE_STOPPED;
   1034         }
   1035     }
   1036 
   1037     private final IBinder mICallBack = new Binder();
   1038     private void handleFullVolumeRec(boolean starting) {
   1039         if (!mIsSubmixFullVolume) {
   1040             return;
   1041         }
   1042         final IBinder b = ServiceManager.getService(android.content.Context.AUDIO_SERVICE);
   1043         final IAudioService ias = IAudioService.Stub.asInterface(b);
   1044         try {
   1045             ias.forceRemoteSubmixFullVolume(starting, mICallBack);
   1046         } catch (RemoteException e) {
   1047             Log.e(TAG, "Error talking to AudioService when handling full submix volume", e);
   1048         }
   1049     }
   1050 
   1051     //---------------------------------------------------------
   1052     // Audio data supply
   1053     //--------------------
   1054     /**
   1055      * Reads audio data from the audio hardware for recording into a byte array.
   1056      * The format specified in the AudioRecord constructor should be
   1057      * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
   1058      * @param audioData the array to which the recorded audio data is written.
    1059      * @param offsetInBytes index in audioData to which the data is written expressed in bytes.
   1060      * @param sizeInBytes the number of requested bytes.
   1061      * @return zero or the positive number of bytes that were read, or one of the following
   1062      *    error codes. The number of bytes will not exceed sizeInBytes.
   1063      * <ul>
   1064      * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
   1065      * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
   1066      * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
   1067      *    needs to be recreated. The dead object error code is not returned if some data was
   1068      *    successfully transferred. In this case, the error is returned at the next read()</li>
   1069      * <li>{@link #ERROR} in case of other error</li>
   1070      * </ul>
   1071      */
   1072     public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) {
   1073         return read(audioData, offsetInBytes, sizeInBytes, READ_BLOCKING);
   1074     }
   1075 
   1076     /**
   1077      * Reads audio data from the audio hardware for recording into a byte array.
   1078      * The format specified in the AudioRecord constructor should be
   1079      * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
   1080      * The format can be {@link AudioFormat#ENCODING_PCM_16BIT}, but this is deprecated.
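     * <p>A minimal non-blocking sketch, assuming a hypothetical recording <code>recorder</code>
     * instance and an illustrative chunk size:
     * <pre class="prettyprint">
     * byte[] chunk = new byte[4096];
     * int bytesRead = recorder.read(chunk, 0, chunk.length, AudioRecord.READ_NON_BLOCKING);
     * if (bytesRead &gt; 0) {
     *     // bytesRead bytes of audio are now available in chunk
     * } else if (bytesRead &lt; 0) {
     *     // handle ERROR_INVALID_OPERATION, ERROR_BAD_VALUE, ERROR_DEAD_OBJECT or ERROR
     * } // bytesRead == 0 means no data was available without blocking
     * </pre>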
   1081      * @param audioData the array to which the recorded audio data is written.
   1082      * @param offsetInBytes index in audioData to which the data is written expressed in bytes.
   1083      *        Must not be negative, or cause the data access to go out of bounds of the array.
   1084      * @param sizeInBytes the number of requested bytes.
   1085      *        Must not be negative, or cause the data access to go out of bounds of the array.
   1086      * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
   1087      *     <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
   1088      *     is read.
   1089      *     <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
   1090      *     reading as much audio data as possible without blocking.
   1091      * @return zero or the positive number of bytes that were read, or one of the following
   1092      *    error codes. The number of bytes will be a multiple of the frame size in bytes
    1093      *    and will not exceed sizeInBytes.
   1094      * <ul>
   1095      * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
   1096      * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
   1097      * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
   1098      *    needs to be recreated. The dead object error code is not returned if some data was
   1099      *    successfully transferred. In this case, the error is returned at the next read()</li>
   1100      * <li>{@link #ERROR} in case of other error</li>
   1101      * </ul>
   1102      */
   1103     public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes,
   1104             @ReadMode int readMode) {
   1105         if (mState != STATE_INITIALIZED  || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
   1106             return ERROR_INVALID_OPERATION;
   1107         }
   1108 
   1109         if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
   1110             Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
   1111             return ERROR_BAD_VALUE;
   1112         }
   1113 
   1114         if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
   1115                 || (offsetInBytes + sizeInBytes < 0)  // detect integer overflow
   1116                 || (offsetInBytes + sizeInBytes > audioData.length)) {
   1117             return ERROR_BAD_VALUE;
   1118         }
   1119 
   1120         return native_read_in_byte_array(audioData, offsetInBytes, sizeInBytes,
   1121                 readMode == READ_BLOCKING);
   1122     }
   1123 
   1124     /**
   1125      * Reads audio data from the audio hardware for recording into a short array.
   1126      * The format specified in the AudioRecord constructor should be
   1127      * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
   1128      * @param audioData the array to which the recorded audio data is written.
   1129      * @param offsetInShorts index in audioData to which the data is written expressed in shorts.
   1130      *        Must not be negative, or cause the data access to go out of bounds of the array.
   1131      * @param sizeInShorts the number of requested shorts.
   1132      *        Must not be negative, or cause the data access to go out of bounds of the array.
   1133      * @return zero or the positive number of shorts that were read, or one of the following
   1134      *    error codes. The number of shorts will be a multiple of the channel count not to exceed
   1135      *    sizeInShorts.
   1136      * <ul>
   1137      * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
   1138      * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
   1139      * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
   1140      *    needs to be recreated. The dead object error code is not returned if some data was
   1141      *    successfully transferred. In this case, the error is returned at the next read()</li>
   1142      * <li>{@link #ERROR} in case of other error</li>
   1143      * </ul>
   1144      */
   1145     public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) {
   1146         return read(audioData, offsetInShorts, sizeInShorts, READ_BLOCKING);
   1147     }
   1148 
   1149     /**
   1150      * Reads audio data from the audio hardware for recording into a short array.
   1151      * The format specified in the AudioRecord constructor should be
   1152      * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
   1153      * @param audioData the array to which the recorded audio data is written.
   1154      * @param offsetInShorts index in audioData from which the data is written expressed in shorts.
   1155      *        Must not be negative, or cause the data access to go out of bounds of the array.
   1156      * @param sizeInShorts the number of requested shorts.
   1157      *        Must not be negative, or cause the data access to go out of bounds of the array.
   1158      * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
   1159      *     <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
   1160      *     is read.
   1161      *     <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
   1162      *     reading as much audio data as possible without blocking.
   1163      * @return zero or the positive number of shorts that were read, or one of the following
   1164      *    error codes. The number of shorts will be a multiple of the channel count not to exceed
   1165      *    sizeInShorts.
   1166      * <ul>
   1167      * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
   1168      * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
   1169      * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
   1170      *    needs to be recreated. The dead object error code is not returned if some data was
   1171      *    successfully transferred. In this case, the error is returned at the next read()</li>
   1172      * <li>{@link #ERROR} in case of other error</li>
   1173      * </ul>
   1174      */
   1175     public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts,
   1176             @ReadMode int readMode) {
   1177         if (mState != STATE_INITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
   1178             return ERROR_INVALID_OPERATION;
   1179         }
   1180 
   1181         if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
   1182             Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
   1183             return ERROR_BAD_VALUE;
   1184         }
   1185 
   1186         if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
   1187                 || (offsetInShorts + sizeInShorts < 0)  // detect integer overflow
   1188                 || (offsetInShorts + sizeInShorts > audioData.length)) {
   1189             return ERROR_BAD_VALUE;
   1190         }
   1191 
   1192         return native_read_in_short_array(audioData, offsetInShorts, sizeInShorts,
   1193                 readMode == READ_BLOCKING);
   1194     }
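             // Illustrative sketch (assumption, not part of the original sources): a minimal
             // blocking capture of 16-bit PCM using the public getMinBufferSize() helper and the
             // five-argument constructor; "process" is a hypothetical consumer.
             //
             //     int rate = 44100;
             //     int minBytes = AudioRecord.getMinBufferSize(rate,
             //             AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
             //     AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC, rate,
             //             AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, 2 * minBytes);
             //     short[] pcm = new short[minBytes / 2];
             //     record.startRecording();
             //     int n = record.read(pcm, 0, pcm.length, AudioRecord.READ_BLOCKING);
             //     if (n > 0) {
             //         process(pcm, n);         // n shorts were captured
             //     }
             //     record.stop();
             //     record.release();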
   1195 
   1196     /**
   1197      * Reads audio data from the audio hardware for recording into a float array.
   1198      * The format specified in the AudioRecord constructor should be
   1199      * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array.
   1200      * @param audioData the array to which the recorded audio data is written.
   1201      * @param offsetInFloats index in audioData from which the data is written.
   1202      *        Must not be negative, or cause the data access to go out of bounds of the array.
   1203      * @param sizeInFloats the number of requested floats.
   1204      *        Must not be negative, or cause the data access to go out of bounds of the array.
   1205      * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
   1206      *     <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
   1207      *     is read.
   1208      *     <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
   1209      *     reading as much audio data as possible without blocking.
   1210      * @return zero or the positive number of floats that were read, or one of the following
   1211      *    error codes. The number of floats will be a multiple of the channel count not to exceed
   1212      *    sizeInFloats.
   1213      * <ul>
   1214      * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
   1215      * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
   1216      * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
   1217      *    needs to be recreated. The dead object error code is not returned if some data was
   1218      *    successfully transferred. In this case, the error is returned at the next read()</li>
   1219      * <li>{@link #ERROR} in case of other error</li>
   1220      * </ul>
   1221      */
   1222     public int read(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats,
   1223             @ReadMode int readMode) {
   1224         if (mState == STATE_UNINITIALIZED) {
   1225             Log.e(TAG, "AudioRecord.read() called in invalid state STATE_UNINITIALIZED");
   1226             return ERROR_INVALID_OPERATION;
   1227         }
   1228 
   1229         if (mAudioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
   1230             Log.e(TAG, "AudioRecord.read(float[] ...) requires format ENCODING_PCM_FLOAT");
   1231             return ERROR_INVALID_OPERATION;
   1232         }
   1233 
   1234         if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
   1235             Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
   1236             return ERROR_BAD_VALUE;
   1237         }
   1238 
   1239         if ((audioData == null) || (offsetInFloats < 0) || (sizeInFloats < 0)
   1240                 || (offsetInFloats + sizeInFloats < 0)  // detect integer overflow
   1241                 || (offsetInFloats + sizeInFloats > audioData.length)) {
   1242             return ERROR_BAD_VALUE;
   1243         }
   1244 
   1245         return native_read_in_float_array(audioData, offsetInFloats, sizeInFloats,
   1246                 readMode == READ_BLOCKING);
   1247     }
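             // Illustrative sketch (assumption, not part of the original sources): reading float
             // samples. The record must be configured with ENCODING_PCM_FLOAT, as enforced above;
             // the Builder-based construction shown here is one way to do that.
             //
             //     AudioRecord record = new AudioRecord.Builder()
             //             .setAudioFormat(new AudioFormat.Builder()
             //                     .setEncoding(AudioFormat.ENCODING_PCM_FLOAT)
             //                     .setSampleRate(48000)
             //                     .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
             //                     .build())
             //             .build();
             //     float[] samples = new float[2048];
             //     record.startRecording();
             //     int n = record.read(samples, 0, samples.length, AudioRecord.READ_BLOCKING);
             //     record.stop();
             //     record.release();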
   1248 
   1249     /**
   1250      * Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
   1251      * is not a direct buffer, this method will always return 0.
   1252      * Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
   1253      * unchanged after a call to this method.
   1254      * The representation of the data in the buffer will depend on the format specified in
   1255      * the AudioRecord constructor, and will be native endian.
   1256      * @param audioBuffer the direct buffer to which the recorded audio data is written.
   1257      * Data is written to audioBuffer.position().
   1258      * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
   1259      *    that the number of bytes requested be a multiple of the frame size (sample size in
   1260      *    bytes multiplied by the channel count).
   1261      * @return zero or the positive number of bytes that were read, or one of the following
   1262      *    error codes. The number of bytes will not exceed sizeInBytes and will be truncated to be
   1263      *    a multiple of the frame size.
   1264      * <ul>
   1265      * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
   1266      * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
   1267      * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
   1268      *    needs to be recreated. The dead object error code is not returned if some data was
   1269      *    successfully transferred. In this case, the error is returned at the next read()</li>
   1270      * <li>{@link #ERROR} in case of other error</li>
   1271      * </ul>
   1272      */
   1273     public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes) {
   1274         return read(audioBuffer, sizeInBytes, READ_BLOCKING);
   1275     }
   1276 
   1277     /**
   1278      * Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
   1279      * is not a direct buffer, this method will always return 0.
   1280      * Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
   1281      * unchanged after a call to this method.
   1282      * The representation of the data in the buffer will depend on the format specified in
   1283      * the AudioRecord constructor, and will be native endian.
   1284      * @param audioBuffer the direct buffer to which the recorded audio data is written.
   1285      * Data is written to audioBuffer.position().
   1286      * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
   1287      *    that the number of bytes requested be a multiple of the frame size (sample size in
   1288      *    bytes multiplied by the channel count).
   1289      * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
   1290      *     <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
   1291      *     is read.
   1292      *     <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
   1293      *     reading as much audio data as possible without blocking.
   1294      * @return zero or the positive number of bytes that were read, or one of the following
   1295      *    error codes. The number of bytes will not exceed sizeInBytes and will be truncated to be
   1296      *    a multiple of the frame size.
   1297      * <ul>
   1298      * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
   1299      * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
   1300      * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
   1301      *    needs to be recreated. The dead object error code is not returned if some data was
   1302      *    successfully transferred. In this case, the error is returned at the next read()</li>
   1303      * <li>{@link #ERROR} in case of other error</li>
   1304      * </ul>
   1305      */
   1306     public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes, @ReadMode int readMode) {
   1307         if (mState != STATE_INITIALIZED) {
   1308             return ERROR_INVALID_OPERATION;
   1309         }
   1310 
   1311         if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
   1312             Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
   1313             return ERROR_BAD_VALUE;
   1314         }
   1315 
   1316         if ( (audioBuffer == null) || (sizeInBytes < 0) ) {
   1317             return ERROR_BAD_VALUE;
   1318         }
   1319 
   1320         return native_read_in_direct_buffer(audioBuffer, sizeInBytes, readMode == READ_BLOCKING);
   1321     }
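             // Illustrative sketch (assumption, not part of the original sources): a non-blocking
             // read into a direct ByteBuffer. The buffer must be direct, and its position() is
             // left unchanged, so the captured bytes start at the current position.
             //
             //     ByteBuffer buffer = ByteBuffer.allocateDirect(8192)
             //             .order(ByteOrder.nativeOrder());
             //     int n = record.read(buffer, buffer.capacity(), AudioRecord.READ_NON_BLOCKING);
             //     if (n > 0) {
             //         buffer.limit(n);         // n bytes, truncated to whole frames
             //         // ... hand the buffer to a codec or writer ...
             //     }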
   1322 
   1323     /**
    1324      *  Returns metrics data about the current AudioRecord instance.
   1325      *
   1326      * @return a {@link PersistableBundle} containing the set of attributes and values
    1327      * available for the media being handled by this instance of AudioRecord.
    1328      * The attributes are described in {@link MetricsConstants}.
   1329      *
   1330      * Additional vendor-specific fields may also be present in
   1331      * the return value.
   1332      */
   1333     public PersistableBundle getMetrics() {
   1334         PersistableBundle bundle = native_getMetrics();
   1335         return bundle;
   1336     }
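             // Illustrative sketch (assumption, not part of the original sources): inspecting the
             // returned metrics with the keys defined in MetricsConstants at the end of this file.
             //
             //     PersistableBundle metrics = record.getMetrics();
             //     String encoding = metrics.getString(AudioRecord.MetricsConstants.ENCODING);
             //     int sampleRate = metrics.getInt(AudioRecord.MetricsConstants.SAMPLERATE);
             //     Log.d("RecorderDemo", "recording " + encoding + " at " + sampleRate + " Hz");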
   1337 
   1338     private native PersistableBundle native_getMetrics();
   1339 
   1340     //--------------------------------------------------------------------------
   1341     // Initialization / configuration
   1342     //--------------------
   1343     /**
   1344      * Sets the listener the AudioRecord notifies when a previously set marker is reached or
   1345      * for each periodic record head position update.
   1346      * @param listener
   1347      */
   1348     public void setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener) {
   1349         setRecordPositionUpdateListener(listener, null);
   1350     }
   1351 
   1352     /**
   1353      * Sets the listener the AudioRecord notifies when a previously set marker is reached or
   1354      * for each periodic record head position update.
    1355      * Use this method to receive AudioRecord events in the Handler associated with a thread
    1356      * other than the one in which you created the AudioRecord instance.
   1357      * @param listener
   1358      * @param handler the Handler that will receive the event notification messages.
   1359      */
   1360     public void setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener,
   1361                                                     Handler handler) {
   1362         synchronized (mPositionListenerLock) {
   1363 
   1364             mPositionListener = listener;
   1365 
   1366             if (listener != null) {
   1367                 if (handler != null) {
   1368                     mEventHandler = new NativeEventHandler(this, handler.getLooper());
   1369                 } else {
   1370                     // no given handler, use the looper the AudioRecord was created in
   1371                     mEventHandler = new NativeEventHandler(this, mInitializationLooper);
   1372                 }
   1373             } else {
   1374                 mEventHandler = null;
   1375             }
   1376         }
   1377 
   1378     }
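             // Illustrative sketch (assumption, not part of the original sources): delivering
             // marker and period callbacks on a dedicated HandlerThread. The marker and period
             // values are arbitrary examples; see setNotificationMarkerPosition() and
             // setPositionNotificationPeriod() below.
             //
             //     HandlerThread thread = new HandlerThread("AudioRecordEvents");
             //     thread.start();
             //     record.setRecordPositionUpdateListener(new OnRecordPositionUpdateListener() {
             //         @Override public void onMarkerReached(AudioRecord r) { /* marker hit */ }
             //         @Override public void onPeriodicNotification(AudioRecord r) { /* tick */ }
             //     }, new Handler(thread.getLooper()));
             //     record.setNotificationMarkerPosition(44100);   // one second at 44.1 kHz
             //     record.setPositionNotificationPeriod(4410);    // every 100 ms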
   1379 
   1380 
   1381     /**
   1382      * Sets the marker position at which the listener is called, if set with
   1383      * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
   1384      * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler)}.
   1385      * @param markerInFrames marker position expressed in frames
   1386      * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
   1387      *  {@link #ERROR_INVALID_OPERATION}
   1388      */
   1389     public int setNotificationMarkerPosition(int markerInFrames) {
   1390         if (mState == STATE_UNINITIALIZED) {
   1391             return ERROR_INVALID_OPERATION;
   1392         }
   1393         return native_set_marker_pos(markerInFrames);
   1394     }
   1395 
   1396     /**
   1397      * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioRecord.
   1398      * Note: The query is only valid if the AudioRecord is currently recording. If it is not,
   1399      * <code>getRoutedDevice()</code> will return null.
   1400      */
   1401     @Override
   1402     public AudioDeviceInfo getRoutedDevice() {
   1403         int deviceId = native_getRoutedDeviceId();
   1404         if (deviceId == 0) {
   1405             return null;
   1406         }
   1407         AudioDeviceInfo[] devices =
   1408                 AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_INPUTS);
   1409         for (int i = 0; i < devices.length; i++) {
   1410             if (devices[i].getId() == deviceId) {
   1411                 return devices[i];
   1412             }
   1413         }
   1414         return null;
   1415     }
   1416 
   1417     /*
   1418      * Call BEFORE adding a routing callback handler.
   1419      */
   1420     @GuardedBy("mRoutingChangeListeners")
   1421     private void testEnableNativeRoutingCallbacksLocked() {
   1422         if (mRoutingChangeListeners.size() == 0) {
   1423             native_enableDeviceCallback();
   1424         }
   1425     }
   1426 
   1427     /*
   1428      * Call AFTER removing a routing callback handler.
   1429      */
   1430     @GuardedBy("mRoutingChangeListeners")
   1431     private void testDisableNativeRoutingCallbacksLocked() {
   1432         if (mRoutingChangeListeners.size() == 0) {
   1433             native_disableDeviceCallback();
   1434         }
   1435     }
   1436 
   1437     //--------------------------------------------------------------------------
   1438     // (Re)Routing Info
   1439     //--------------------
   1440     /**
   1441      * The list of AudioRouting.OnRoutingChangedListener interfaces added (with
    1442      * {@link AudioRecord#addOnRoutingChangedListener}) by an app to receive
   1443      * (re)routing notifications.
   1444      */
   1445     @GuardedBy("mRoutingChangeListeners")
   1446     private ArrayMap<AudioRouting.OnRoutingChangedListener,
   1447             NativeRoutingEventHandlerDelegate> mRoutingChangeListeners = new ArrayMap<>();
   1448 
   1449     /**
   1450      * Adds an {@link AudioRouting.OnRoutingChangedListener} to receive notifications of
   1451      * routing changes on this AudioRecord.
   1452      * @param listener The {@link AudioRouting.OnRoutingChangedListener} interface to receive
   1453      * notifications of rerouting events.
   1454      * @param handler  Specifies the {@link Handler} object for the thread on which to execute
   1455      * the callback. If <code>null</code>, the {@link Handler} associated with the main
   1456      * {@link Looper} will be used.
   1457      */
   1458     @Override
   1459     public void addOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener,
   1460             android.os.Handler handler) {
   1461         synchronized (mRoutingChangeListeners) {
   1462             if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
   1463                 testEnableNativeRoutingCallbacksLocked();
   1464                 mRoutingChangeListeners.put(
   1465                         listener, new NativeRoutingEventHandlerDelegate(this, listener,
   1466                                 handler != null ? handler : new Handler(mInitializationLooper)));
   1467             }
   1468         }
   1469     }
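             // Illustrative sketch (assumption, not part of the original sources): observing
             // routing changes with the general-purpose AudioRouting listener. Passing null for
             // the Handler uses the default Looper described in the javadoc above.
             //
             //     AudioRouting.OnRoutingChangedListener listener = router -> {
             //         AudioDeviceInfo device = router.getRoutedDevice();
             //         Log.d("RecorderDemo", "now recording from "
             //                 + (device != null ? device.getProductName() : "unknown"));
             //     };
             //     record.addOnRoutingChangedListener(listener, null);
             //     // ... later, when no longer needed:
             //     record.removeOnRoutingChangedListener(listener);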
   1470 
   1471     /**
   1472      * Removes an {@link AudioRouting.OnRoutingChangedListener} which has been previously added
   1473     * to receive rerouting notifications.
    1474      * to receive rerouting notifications.
    1475      * @param listener The previously added {@link AudioRouting.OnRoutingChangedListener} interface
    1476      * to remove.
    1477      */
   1478     public void removeOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener) {
   1479         synchronized (mRoutingChangeListeners) {
   1480             if (mRoutingChangeListeners.containsKey(listener)) {
   1481                 mRoutingChangeListeners.remove(listener);
   1482                 testDisableNativeRoutingCallbacksLocked();
   1483             }
   1484         }
   1485     }
   1486 
   1487     //--------------------------------------------------------------------------
   1488     // (Re)Routing Info
   1489     //--------------------
   1490     /**
   1491      * Defines the interface by which applications can receive notifications of
   1492      * routing changes for the associated {@link AudioRecord}.
   1493      *
   1494      * @deprecated users should switch to the general purpose
   1495      *             {@link AudioRouting.OnRoutingChangedListener} class instead.
   1496      */
   1497     @Deprecated
   1498     public interface OnRoutingChangedListener extends AudioRouting.OnRoutingChangedListener {
   1499         /**
    1500          * Called when the routing of an AudioRecord changes from either an
   1501          * explicit or policy rerouting. Use {@link #getRoutedDevice()} to
   1502          * retrieve the newly routed-from device.
   1503          */
   1504         public void onRoutingChanged(AudioRecord audioRecord);
   1505 
   1506         @Override
   1507         default public void onRoutingChanged(AudioRouting router) {
   1508             if (router instanceof AudioRecord) {
   1509                 onRoutingChanged((AudioRecord) router);
   1510             }
   1511         }
   1512     }
   1513 
   1514     /**
   1515      * Adds an {@link OnRoutingChangedListener} to receive notifications of routing changes
   1516      * on this AudioRecord.
   1517      * @param listener The {@link OnRoutingChangedListener} interface to receive notifications
   1518      * of rerouting events.
   1519      * @param handler  Specifies the {@link Handler} object for the thread on which to execute
   1520      * the callback. If <code>null</code>, the {@link Handler} associated with the main
   1521      * {@link Looper} will be used.
   1522      * @deprecated users should switch to the general purpose
   1523      *             {@link AudioRouting.OnRoutingChangedListener} class instead.
   1524      */
   1525     @Deprecated
   1526     public void addOnRoutingChangedListener(OnRoutingChangedListener listener,
   1527             android.os.Handler handler) {
   1528         addOnRoutingChangedListener((AudioRouting.OnRoutingChangedListener) listener, handler);
   1529     }
   1530 
   1531     /**
    1532      * Removes an {@link OnRoutingChangedListener} which has been previously added
   1533      * to receive rerouting notifications.
   1534      * @param listener The previously added {@link OnRoutingChangedListener} interface to remove.
   1535      * @deprecated users should switch to the general purpose
   1536      *             {@link AudioRouting.OnRoutingChangedListener} class instead.
   1537      */
   1538     @Deprecated
   1539     public void removeOnRoutingChangedListener(OnRoutingChangedListener listener) {
   1540         removeOnRoutingChangedListener((AudioRouting.OnRoutingChangedListener) listener);
   1541     }
   1542 
   1543     /**
   1544      * Sends device list change notification to all listeners.
   1545      */
   1546     private void broadcastRoutingChange() {
   1547         AudioManager.resetAudioPortGeneration();
   1548         synchronized (mRoutingChangeListeners) {
   1549             for (NativeRoutingEventHandlerDelegate delegate : mRoutingChangeListeners.values()) {
   1550                 delegate.notifyClient();
   1551             }
   1552         }
   1553     }
   1554 
   1555     /**
   1556      * Sets the period at which the listener is called, if set with
   1557      * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
   1558      * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler)}.
   1559      * It is possible for notifications to be lost if the period is too small.
   1560      * @param periodInFrames update period expressed in frames
   1561      * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
   1562      */
   1563     public int setPositionNotificationPeriod(int periodInFrames) {
   1564         if (mState == STATE_UNINITIALIZED) {
   1565             return ERROR_INVALID_OPERATION;
   1566         }
   1567         return native_set_pos_update_period(periodInFrames);
   1568     }
   1569 
   1570     //--------------------------------------------------------------------------
   1571     // Explicit Routing
   1572     //--------------------
   1573     private AudioDeviceInfo mPreferredDevice = null;
   1574 
   1575     /**
   1576      * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
   1577      * the input to this AudioRecord.
   1578      * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio source.
   1579      *  If deviceInfo is null, default routing is restored.
   1580      * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
   1581      * does not correspond to a valid audio input device.
   1582      */
   1583     @Override
   1584     public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
   1585         // Do some validation....
   1586         if (deviceInfo != null && !deviceInfo.isSource()) {
   1587             return false;
   1588         }
   1589 
   1590         int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
   1591         boolean status = native_setInputDevice(preferredDeviceId);
    1592         if (status) {
   1593             synchronized (this) {
   1594                 mPreferredDevice = deviceInfo;
   1595             }
   1596         }
   1597         return status;
   1598     }
   1599 
   1600     /**
   1601      * Returns the selected input specified by {@link #setPreferredDevice}. Note that this
    1602      * is not guaranteed to correspond to the actual device being used for recording.
   1603      */
   1604     @Override
   1605     public AudioDeviceInfo getPreferredDevice() {
   1606         synchronized (this) {
   1607             return mPreferredDevice;
   1608         }
   1609     }
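             // Illustrative sketch (assumption, not part of the original sources): preferring a
             // wired headset microphone when one is attached; "context" and "record" are assumed
             // to be a valid Context and an initialized AudioRecord.
             //
             //     AudioManager am = context.getSystemService(AudioManager.class);
             //     AudioDeviceInfo wiredMic = null;
             //     for (AudioDeviceInfo device : am.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
             //         if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
             //             wiredMic = device;
             //             break;
             //         }
             //     }
             //     record.setPreferredDevice(wiredMic);   // null restores default routing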
   1610 
   1611     //--------------------------------------------------------------------------
   1612     // Microphone information
   1613     //--------------------
   1614     /**
    1615      * Returns a list of {@link MicrophoneInfo} representing the active microphones.
    1616      * By querying the channel mapping for each active microphone, a developer can know
    1617      * how each microphone is used by each channel of a capture stream.
    1618      * Note that the information about the active microphones may change during a recording.
    1619      * See {@link AudioManager#registerAudioDeviceCallback} to be notified of changes
    1620      * in the audio devices; querying the active microphones again will then return the
    1621      * latest information.
    1622      *
    1623      * @return a list of {@link MicrophoneInfo} representing the active microphones.
   1624      * @throws IOException if an error occurs
   1625      */
   1626     public List<MicrophoneInfo> getActiveMicrophones() throws IOException {
   1627         ArrayList<MicrophoneInfo> activeMicrophones = new ArrayList<>();
   1628         int status = native_get_active_microphones(activeMicrophones);
   1629         if (status != AudioManager.SUCCESS) {
   1630             if (status != AudioManager.ERROR_INVALID_OPERATION) {
   1631                 Log.e(TAG, "getActiveMicrophones failed:" + status);
   1632             }
   1633             Log.i(TAG, "getActiveMicrophones failed, fallback on routed device info");
   1634         }
   1635         AudioManager.setPortIdForMicrophones(activeMicrophones);
   1636 
    1637         // Use the routed device when no information is returned by the HAL.
   1638         if (activeMicrophones.size() == 0) {
   1639             AudioDeviceInfo device = getRoutedDevice();
   1640             if (device != null) {
   1641                 MicrophoneInfo microphone = AudioManager.microphoneInfoFromAudioDeviceInfo(device);
   1642                 ArrayList<Pair<Integer, Integer>> channelMapping = new ArrayList<>();
   1643                 for (int i = 0; i < mChannelCount; i++) {
   1644                     channelMapping.add(new Pair(i, MicrophoneInfo.CHANNEL_MAPPING_DIRECT));
   1645                 }
   1646                 microphone.setChannelMapping(channelMapping);
   1647                 activeMicrophones.add(microphone);
   1648             }
   1649         }
   1650         return activeMicrophones;
   1651     }
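             // Illustrative sketch (assumption, not part of the original sources): logging the
             // active microphones while a recording is in progress.
             //
             //     try {
             //         for (MicrophoneInfo mic : record.getActiveMicrophones()) {
             //             Log.d("RecorderDemo", "mic " + mic.getDescription()
             //                     + " location=" + mic.getLocation());
             //         }
             //     } catch (IOException e) {
             //         Log.e("RecorderDemo", "could not query active microphones", e);
             //     }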
   1652 
   1653     //---------------------------------------------------------
   1654     // Interface definitions
   1655     //--------------------
   1656     /**
   1657      * Interface definition for a callback to be invoked when an AudioRecord has
   1658      * reached a notification marker set by {@link AudioRecord#setNotificationMarkerPosition(int)}
   1659      * or for periodic updates on the progress of the record head, as set by
   1660      * {@link AudioRecord#setPositionNotificationPeriod(int)}.
   1661      */
   1662     public interface OnRecordPositionUpdateListener  {
   1663         /**
   1664          * Called on the listener to notify it that the previously set marker has been reached
   1665          * by the recording head.
   1666          */
   1667         void onMarkerReached(AudioRecord recorder);
   1668 
   1669         /**
   1670          * Called on the listener to periodically notify it that the record head has reached
   1671          * a multiple of the notification period.
   1672          */
   1673         void onPeriodicNotification(AudioRecord recorder);
   1674     }
   1675 
   1676 
   1677 
   1678     //---------------------------------------------------------
   1679     // Inner classes
   1680     //--------------------
   1681 
   1682     /**
   1683      * Helper class to handle the forwarding of native events to the appropriate listener
   1684      * (potentially) handled in a different thread
   1685      */
   1686     private class NativeEventHandler extends Handler {
   1687         private final AudioRecord mAudioRecord;
   1688 
   1689         NativeEventHandler(AudioRecord recorder, Looper looper) {
   1690             super(looper);
   1691             mAudioRecord = recorder;
   1692         }
   1693 
   1694         @Override
   1695         public void handleMessage(Message msg) {
   1696             OnRecordPositionUpdateListener listener = null;
   1697             synchronized (mPositionListenerLock) {
   1698                 listener = mAudioRecord.mPositionListener;
   1699             }
   1700 
   1701             switch (msg.what) {
   1702             case NATIVE_EVENT_MARKER:
   1703                 if (listener != null) {
   1704                     listener.onMarkerReached(mAudioRecord);
   1705                 }
   1706                 break;
   1707             case NATIVE_EVENT_NEW_POS:
   1708                 if (listener != null) {
   1709                     listener.onPeriodicNotification(mAudioRecord);
   1710                 }
   1711                 break;
   1712             default:
   1713                 loge("Unknown native event type: " + msg.what);
   1714                 break;
   1715             }
   1716         }
   1717     }
   1718 
   1719     //---------------------------------------------------------
   1720     // Java methods called from the native side
   1721     //--------------------
   1722     @SuppressWarnings("unused")
   1723     private static void postEventFromNative(Object audiorecord_ref,
   1724             int what, int arg1, int arg2, Object obj) {
   1725         //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
   1726         AudioRecord recorder = (AudioRecord)((WeakReference)audiorecord_ref).get();
   1727         if (recorder == null) {
   1728             return;
   1729         }
   1730 
   1731         if (what == AudioSystem.NATIVE_EVENT_ROUTING_CHANGE) {
   1732             recorder.broadcastRoutingChange();
   1733             return;
   1734         }
   1735 
   1736         if (recorder.mEventHandler != null) {
   1737             Message m =
   1738                 recorder.mEventHandler.obtainMessage(what, arg1, arg2, obj);
   1739             recorder.mEventHandler.sendMessage(m);
   1740         }
   1741 
   1742     }
   1743 
   1744 
   1745     //---------------------------------------------------------
   1746     // Native methods called from the Java side
   1747     //--------------------
   1748 
   1749     private native final int native_setup(Object audiorecord_this,
   1750             Object /*AudioAttributes*/ attributes,
   1751             int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
   1752             int buffSizeInBytes, int[] sessionId, String opPackageName,
   1753             long nativeRecordInJavaObj);
   1754 
   1755     // TODO remove: implementation calls directly into implementation of native_release()
   1756     private native final void native_finalize();
   1757 
   1758     /**
   1759      * @hide
   1760      */
   1761     public native final void native_release();
   1762 
   1763     private native final int native_start(int syncEvent, int sessionId);
   1764 
   1765     private native final void native_stop();
   1766 
   1767     private native final int native_read_in_byte_array(byte[] audioData,
   1768             int offsetInBytes, int sizeInBytes, boolean isBlocking);
   1769 
   1770     private native final int native_read_in_short_array(short[] audioData,
   1771             int offsetInShorts, int sizeInShorts, boolean isBlocking);
   1772 
   1773     private native final int native_read_in_float_array(float[] audioData,
   1774             int offsetInFloats, int sizeInFloats, boolean isBlocking);
   1775 
   1776     private native final int native_read_in_direct_buffer(Object jBuffer,
   1777             int sizeInBytes, boolean isBlocking);
   1778 
   1779     private native final int native_get_buffer_size_in_frames();
   1780 
   1781     private native final int native_set_marker_pos(int marker);
   1782     private native final int native_get_marker_pos();
   1783 
   1784     private native final int native_set_pos_update_period(int updatePeriod);
   1785     private native final int native_get_pos_update_period();
   1786 
   1787     static private native final int native_get_min_buff_size(
   1788             int sampleRateInHz, int channelCount, int audioFormat);
   1789 
   1790     private native final boolean native_setInputDevice(int deviceId);
   1791     private native final int native_getRoutedDeviceId();
   1792     private native final void native_enableDeviceCallback();
   1793     private native final void native_disableDeviceCallback();
   1794 
   1795     private native final int native_get_timestamp(@NonNull AudioTimestamp outTimestamp,
   1796             @AudioTimestamp.Timebase int timebase);
   1797 
   1798     private native final int native_get_active_microphones(
   1799             ArrayList<MicrophoneInfo> activeMicrophones);
   1800 
   1801     //---------------------------------------------------------
   1802     // Utility methods
   1803     //------------------
   1804 
   1805     private static void logd(String msg) {
   1806         Log.d(TAG, msg);
   1807     }
   1808 
   1809     private static void loge(String msg) {
   1810         Log.e(TAG, msg);
   1811     }
   1812 
   1813     public static final class MetricsConstants
   1814     {
   1815         private MetricsConstants() {}
   1816 
   1817         /**
   1818          * Key to extract the output format being recorded
   1819          * from the {@link AudioRecord#getMetrics} return value.
   1820          * The value is a String.
   1821          */
   1822         public static final String ENCODING = "android.media.audiorecord.encoding";
   1823 
   1824         /**
   1825          * Key to extract the Source Type for this track
   1826          * from the {@link AudioRecord#getMetrics} return value.
   1827          * The value is a String.
   1828          */
   1829         public static final String SOURCE = "android.media.audiorecord.source";
   1830 
   1831         /**
   1832          * Key to extract the estimated latency through the recording pipeline
   1833          * from the {@link AudioRecord#getMetrics} return value.
   1834          * This is in units of milliseconds.
   1835          * The value is an integer.
   1836          */
   1837         public static final String LATENCY = "android.media.audiorecord.latency";
   1838 
   1839         /**
   1840          * Key to extract the sink sample rate for this record track in Hz
   1841          * from the {@link AudioRecord#getMetrics} return value.
   1842          * The value is an integer.
   1843          */
   1844         public static final String SAMPLERATE = "android.media.audiorecord.samplerate";
   1845 
   1846         /**
   1847          * Key to extract the number of channels being recorded in this record track
   1848          * from the {@link AudioRecord#getMetrics} return value.
   1849          * The value is an integer.
   1850          */
   1851         public static final String CHANNELS = "android.media.audiorecord.channels";
   1852 
   1853     }
   1854 }
   1855