/frameworks/base/core/java/android/bluetooth/ |
BluetoothAudioConfig.java | 35 public BluetoothAudioConfig(int sampleRate, int channelConfig, int audioFormat) { 38 mAudioFormat = audioFormat; 72 int audioFormat = in.readInt(); 73 return new BluetoothAudioConfig(sampleRate, channelConfig, audioFormat); 98 * Returns the channel configuration (either {@link android.media.AudioFormat#CHANNEL_IN_MONO} 99 * or {@link android.media.AudioFormat#CHANNEL_IN_STEREO}) 108 * Returns the channel audio format (either {@link android.media.AudioFormat#ENCODING_PCM_16BIT} 109 * or {@link android.media.AudioFormat#ENCODING_PCM_8BIT}
|
/frameworks/base/core/java/android/speech/tts/ |
FileSynthesisCallback.java | 19 import android.media.AudioFormat; 102 public int start(int sampleRateInHz, int audioFormat, int channelCount) { 104 Log.d(TAG, "FileSynthesisRequest.start(" + sampleRateInHz + "," + audioFormat 107 if (audioFormat != AudioFormat.ENCODING_PCM_8BIT && 108 audioFormat != AudioFormat.ENCODING_PCM_16BIT && 109 audioFormat != AudioFormat.ENCODING_PCM_FLOAT) { 110 Log.e(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one " [all...] |
ITextToSpeechCallback.aidl | 65 * @param audioFormat The audio format of the generated audio in the {@link #onAudioAvailable} 66 * call. Should be one of {@link android.media.AudioFormat#ENCODING_PCM_8BIT}, 67 * {@link android.media.AudioFormat#ENCODING_PCM_16BIT} or 68 * {@link android.media.AudioFormat#ENCODING_PCM_FLOAT}. 71 void onBeginSynthesis(String utteranceId, int sampleRateInHz, int audioFormat, int channelCount);
|
PlaybackSynthesisCallback.java | 19 import android.media.AudioFormat; 125 public int start(int sampleRateInHz, int audioFormat, int channelCount) { 126 if (DBG) Log.d(TAG, "start(" + sampleRateInHz + "," + audioFormat + "," + channelCount 128 if (audioFormat != AudioFormat.ENCODING_PCM_8BIT && 129 audioFormat != AudioFormat.ENCODING_PCM_16BIT && 130 audioFormat != AudioFormat.ENCODING_PCM_FLOAT) { 131 Log.w(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one " [all...] |
SynthesisCallback.java | 20 import android.media.AudioFormat; 45 AudioFormat.ENCODING_PCM_8BIT, 46 AudioFormat.ENCODING_PCM_16BIT, 47 AudioFormat.ENCODING_PCM_FLOAT 65 * @param audioFormat Audio format of the generated audio. Must be one of {@link 66 * AudioFormat#ENCODING_PCM_8BIT} or {@link AudioFormat#ENCODING_PCM_16BIT}. Can also be 67 * {@link AudioFormat#ENCODING_PCM_FLOAT} when targeting Android N and above. 74 @SupportedAudioFormat int audioFormat, 159 * definition of a frame depends on the format given by {@link #start}. See {@link AudioFormat} [all...] |
BlockingAudioTrack.java | 5 import android.media.AudioFormat; 79 int audioFormat, int channelCount) { 82 mAudioFormat = audioFormat; 85 mBytesPerFrame = AudioFormat.getBytesPerSample(mAudioFormat) * mChannelCount; 176 return AudioFormat.CHANNEL_OUT_MONO; 178 return AudioFormat.CHANNEL_OUT_STEREO; 217 AudioFormat audioFormat = (new AudioFormat.Builder()) 222 audioFormat, bufferSizeInBytes, AudioTrack.MODE_STREAM [all...] |
UtteranceProgressListener.java | 5 import android.media.AudioFormat; 94 * @param audioFormat Audio format of the generated audio. Should be one of 95 * {@link AudioFormat#ENCODING_PCM_8BIT}, {@link AudioFormat#ENCODING_PCM_16BIT} or 96 * {@link AudioFormat#ENCODING_PCM_FLOAT}. 99 public void onBeginSynthesis(String utteranceId, int sampleRateInHz, int audioFormat, int channelCount) {
|
SynthesisPlaybackQueueItem.java | 80 int audioFormat, int channelCount, UtteranceProgressDispatcher dispatcher, 90 mAudioTrack = new BlockingAudioTrack(audioParams, sampleRate, audioFormat, channelCount);
|
TextToSpeechService.java | 686 void dispatchOnBeginSynthesis(int sampleRateInHz, int audioFormat, int channelCount); [all...] |
/frameworks/base/media/java/android/media/ |
AudioFormat.java | 31 * The {@link AudioFormat} class is used to access a number of audio format and 35 * parameter is one of the <code>AudioFormat.ENCODING_*</code> constants. 36 * The <code>AudioFormat</code> constants are also used in {@link MediaFormat} to specify 38 * <p>The {@link AudioFormat.Builder} class can be used to create instances of 39 * the <code>AudioFormat</code> format class. 41 * {@link AudioFormat.Builder} for documentation on the mechanics of the configuration and building 42 * of such instances. Here we describe the main concepts that the <code>AudioFormat</code> class 49 * <p>Closely associated with the <code>AudioFormat</code> is the notion of an 54 * <p>Expressed in Hz, the sample rate in an <code>AudioFormat</code> instance expresses the number 216 public final class AudioFormat implements Parcelable [all...] |
AudioTrack.java | 335 * Never {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED}. 345 private int mChannelMask = AudioFormat.CHANNEL_OUT_MONO; 363 * May be set to {@link AudioFormat#CHANNEL_INVALID} if a channel index mask is specified. 365 private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO; 372 * @see AudioFormat#ENCODING_PCM_8BIT 373 * @see AudioFormat#ENCODING_PCM_16BIT 374 * @see AudioFormat#ENCODING_PCM_FLOAT 421 * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} means to use a route-dependent value 425 * See {@link AudioFormat#CHANNEL_OUT_MONO} and 426 * {@link AudioFormat#CHANNEL_OUT_STEREO [all...] |
AudioRecord.java | 180 * Never {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED}. 197 * @see AudioFormat#ENCODING_PCM_8BIT 198 * @see AudioFormat#ENCODING_PCM_16BIT 199 * @see AudioFormat#ENCODING_PCM_FLOAT 264 * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} means to use a route-dependent value 268 * See {@link AudioFormat#CHANNEL_IN_MONO} and 269 * {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is guaranteed 271 * @param audioFormat the format in which the audio data is to be returned. 272 * See {@link AudioFormat#ENCODING_PCM_8BIT}, {@link AudioFormat#ENCODING_PCM_16BIT} [all...] |
/frameworks/av/media/libaaudio/src/core/ |
AAudioStreamParameters.h | 63 void setFormat(aaudio_format_t audioFormat) { 64 mAudioFormat = audioFormat;
|
/frameworks/base/core/jni/ |
android_media_AudioFormat.h | 22 // keep these values in sync with AudioFormat.java 49 static inline audio_format_t audioFormatToNative(int audioFormat) 51 switch (audioFormat) { 150 // 0 is the java android.media.AudioFormat.CHANNEL_INVALID value
|
android_media_AudioRecord.cpp | 193 jint audioFormat, jint buffSizeInBytes, jintArray jSession, jstring opPackageName, 197 //ALOGV("sampleRate=%d, audioFormat=%d, channel mask=%x, buffSizeInBytes=%d " 199 // sampleRateInHertz, audioFormat, channelMask, buffSizeInBytes, nativeRecordInJavaObj); 257 audio_format_t format = audioFormatToNative(audioFormat); 259 ALOGE("Error creating AudioRecord: unsupported audio format %d.", audioFormat); 662 jint sampleRateInHertz, jint channelCount, jint audioFormat) { 665 sampleRateInHertz, channelCount, audioFormat); 668 audio_format_t format = audioFormatToNative(audioFormat); [all...] |
android_media_AudioTrack.cpp | 215 jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession, 218 ALOGV("sampleRates=%p, channel mask=%x, index mask=%x, audioFormat(Java)=%d, buffSize=%d" 220 jSampleRate, channelPositionMask, channelIndexMask, audioFormat, buffSizeInBytes, 277 audio_format_t format = audioFormatToNative(audioFormat); 279 ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat); 631 static jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, const T *data, [all...] |
/frameworks/av/media/libstagefright/ |
AudioPlayer.cpp | 140 audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT; 143 sAudioFormatToPcmEncoding.map(pcmEncoding, &audioFormat); 147 if (mapMimeToAudioFormat(audioFormat, mime) != OK) { 149 audioFormat = AUDIO_FORMAT_INVALID; 151 ALOGV("Mime type \"%s\" mapped to audio_format 0x%x", mime, audioFormat); 155 if ((audioFormat == AUDIO_FORMAT_AAC) && format->findInt32(kKeyAACAOT, &aacaot)) { 157 mapAACProfileToAudioFormat(audioFormat,(OMX_AUDIO_AACPROFILETYPE) aacaot); 184 offloadInfo.format = audioFormat; 192 mSampleRate, numChannels, channelMask, audioFormat,
|
/cts/tests/tests/media/src/android/media/cts/ |
NonBlockingAudioTrack.java | 18 import android.media.AudioFormat; 53 channelConfig = AudioFormat.CHANNEL_OUT_MONO; 56 channelConfig = AudioFormat.CHANNEL_OUT_STEREO; 59 channelConfig = AudioFormat.CHANNEL_OUT_5POINT1; 69 AudioFormat.ENCODING_PCM_16BIT); 78 AudioFormat.ENCODING_PCM_16BIT, 88 AudioFormat audioFormat = (new AudioFormat.Builder()) 90 .setEncoding(AudioFormat.ENCODING_PCM_16BIT [all...] |
MediaSyncTest.java | 24 import android.media.AudioFormat; 706 AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO); 707 int audioFormat = AudioFormat.ENCODING_PCM_16BIT; 711 audioFormat); 714 * AudioFormat.getBytesPerSample(audioFormat); 722 audioFormat,
|
/packages/services/Car/tests/carservice_test/src/android/media/tests/ |
AudioPolicyTest.java | 24 import android.media.AudioFormat; 98 AudioFormat audioFormat = new AudioFormat.Builder() 99 .setEncoding(AudioFormat.ENCODING_PCM_16BIT) 100 .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO) 107 .setFormat(audioFormat)
|
/cts/tests/tests/speech/src/android/speech/tts/cts/ |
TextToSpeechWrapper.java | 253 public void onBeginSynthesis(String utteranceId, int sampleRateInHz, int audioFormat, int channelCount) { 256 Assert.assertTrue(audioFormat == android.media.AudioFormat.ENCODING_PCM_8BIT 257 || audioFormat == android.media.AudioFormat.ENCODING_PCM_16BIT 258 || audioFormat == android.media.AudioFormat.ENCODING_PCM_FLOAT);
|
/frameworks/base/media/java/android/media/soundtrigger/ |
SoundTriggerDetector.java | 28 import android.media.AudioFormat; 117 private final AudioFormat mAudioFormat; 123 AudioFormat audioFormat, int captureSession, byte[] data) { 127 mAudioFormat = audioFormat; 136 public AudioFormat getCaptureAudioFormat() {
|
/frameworks/base/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/ |
MediaDecoder.java | 264 MediaFormat audioFormat = mMediaExtractor.getTrackFormat(mAudioTrackIndex); 265 mAudioTrackDecoder = new AudioTrackDecoder(mAudioTrackIndex, audioFormat, this);
|
/frameworks/base/media/java/android/media/projection/ |
MediaProjection.java | 148 int audioFormat, int bufferSizeInBytes) {
|
/external/drrickorang/LoopbackApp/app/src/main/java/org/drrickorang/loopback/ |
RecorderRunnable.java | 20 import android.media.AudioFormat; 49 private int mChannelConfig = AudioFormat.CHANNEL_IN_MONO; 50 private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT; 88 RecorderRunnable(PipeShort latencyPipe, int samplingRate, int channelConfig, int audioFormat, 96 mAudioFormat = audioFormat; 135 new AudioFormat.Builder() 136 .setChannelMask(AudioFormat.CHANNEL_IN_MONO) : 137 new AudioFormat 202 new AudioFormat.Builder() 203 .setChannelMask(AudioFormat.CHANNEL_IN_MONO) [all...] |