/external/webrtc/src/modules/audio_processing/

audio_buffer.h
    44  void set_activity(AudioFrame::VADActivity activity);
    45  AudioFrame::VADActivity activity() const;
    49  void DeinterleaveFrom(AudioFrame* audioFrame);
    50  void InterleaveTo(AudioFrame* audioFrame) const;
    53  void InterleaveTo(AudioFrame* frame, bool data_changed) const;
    69  AudioFrame::VADActivity activity_;

audio_processing_impl.h
    68  virtual int ProcessStream(AudioFrame* frame);
    69  virtual int AnalyzeReverseStream(AudioFrame* frame);

audio_buffer.cc
    74  activity_(AudioFrame::kVadUnknown),
   165  void AudioBuffer::set_activity(AudioFrame::VADActivity activity) {
   169  AudioFrame::VADActivity AudioBuffer::activity() const {
   190  void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
   222  void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {

voice_detection_impl.cc
    83  audio->set_activity(AudioFrame::kVadPassive);
    86  audio->set_activity(AudioFrame::kVadActive);

audio_processing_impl.cc
   256  int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
   387  int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
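Read together, these matches trace how voice activity flows through the audio processing module: ProcessStream() deinterleaves the caller's AudioFrame into an AudioBuffer, VoiceDetectionImpl tags that buffer with kVadActive or kVadPassive via set_activity(), and InterleaveTo() writes the samples and the activity back into the frame. A minimal sketch of that round trip, using only the AudioBuffer methods listed above; the helper function, its parameters, and the include paths are hypothetical, not part of the module:

    #include "modules/audio_processing/audio_buffer.h"      // include paths assumed
    #include "modules/interface/module_common_types.h"

    // Hypothetical helper, not webrtc code: push a frame through an existing
    // AudioBuffer and tag it with a VAD decision computed elsewhere.
    void TagFrameWithVad(webrtc::AudioBuffer* buffer,
                         bool stream_has_voice,
                         webrtc::AudioFrame* frame) {
      buffer->DeinterleaveFrom(frame);                       // frame -> buffer
      buffer->set_activity(stream_has_voice ? webrtc::AudioFrame::kVadActive
                                            : webrtc::AudioFrame::kVadPassive);
      buffer->InterleaveTo(frame, true /* data_changed */);  // buffer -> frame
    }
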
/external/chromium_org/third_party/libjingle/source/talk/media/base/

audioframe.h
    33  class AudioFrame {
    35  AudioFrame()
    41  AudioFrame(int16* audio, size_t audio_length, int sample_freq, bool stereo)

voiceprocessor.h
    33  #include "talk/media/base/audioframe.h"
    52  AudioFrame* frame) = 0;

fakemediaprocessor.h
    36  class AudioFrame;
    50  AudioFrame* frame) {
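Note that this AudioFrame (talk/media/base/audioframe.h) is libjingle's own lightweight wrapper and is a different class from webrtc::AudioFrame above; voiceprocessor.h and fakemediaprocessor.h receive it by pointer in their processing callbacks. A minimal construction sketch, assuming the class sits in the cricket namespace like the rest of talk/media and using int16_t in place of libjingle's int16 typedef:

    #include <stdint.h>

    #include "talk/media/base/audioframe.h"

    void MakeFrame() {
      // 10 ms of mono audio at 16 kHz = 160 samples.
      int16_t samples[160] = {0};
      // Constructor as matched above: buffer, length in samples, rate, stereo flag.
      cricket::AudioFrame frame(samples, 160, 16000, false);
    }
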
/frameworks/base/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/

AudioTrackDecoder.java
    98  public void grabSample(FrameValue audioFrame) {
   100  if (audioFrame != null) {
   103  audioFrame.setValue(sample);
   104  audioFrame.setTimestamp(mAudioPresentationTimeUs * 1000);
/external/webrtc/src/modules/interface/

module_common_types.h
   687  * AudioFrame class
   689  * The AudioFrame class holds up to 60 ms wideband
   710  class AudioFrame
   730  AudioFrame();
   731  virtual ~AudioFrame();
   745  AudioFrame& Append(const AudioFrame& rhs);
   749  AudioFrame& operator=(const AudioFrame& rhs);
   750  AudioFrame& operator>>=(const WebRtc_Word32 rhs)
        [all...]
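module_common_types.h is where webrtc::AudioFrame itself is defined, along with the value-style operators matched above. A short sketch of what those operators are used for; the attenuation reading of operator>>= (an arithmetic right shift of every sample, roughly -6 dB per step) is an assumption based on its use elsewhere in WebRTC, not something the matches show:

    #include "modules/interface/module_common_types.h"  // include path assumed

    void CombineFrames(const webrtc::AudioFrame& first,
                       const webrtc::AudioFrame& second) {
      webrtc::AudioFrame out;
      out = first;         // deep copy via operator=
      out >>= 1;           // shift every sample right by one bit (about -6 dB)
      out.Append(second);  // concatenate |second| after |first| in time
    }
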
/external/chromium_org/content/renderer/media/

media_stream_audio_processor.h
    25  class AudioFrame;
    94  void ProcessData(webrtc::AudioFrame* audio_frame,
   113  // AudioFrame used to hold the output of |capture_converter_|.
   114  webrtc::AudioFrame capture_frame_;
   120  // AudioFrame used to hold the output of |render_converter_|.
   121  webrtc::AudioFrame render_frame_;

media_stream_audio_processor_options.h
    12  class AudioFrame;

media_stream_audio_processor.cc
    69  bool Convert(webrtc::AudioFrame* out) {
    89  out->speech_type_ = webrtc::AudioFrame::kNormalSpeech;
    90  out->vad_activity_ = webrtc::AudioFrame::kVadUnknown;
   309  void MediaStreamAudioProcessor::ProcessData(webrtc::AudioFrame* audio_frame,
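MediaStreamAudioProcessor converts Chrome's captured and rendered audio into webrtc::AudioFrame objects (capture_frame_, render_frame_) before handing them to the APM, and the Convert() matches show it resetting speech_type_ and vad_activity_ to neutral values. A sketch of that fill step; only speech_type_, vad_activity_ and their enum values are confirmed by the matches, while the remaining AudioFrame members and the include path are assumptions:

    #include <stdint.h>
    #include <string.h>

    #include "third_party/webrtc/modules/interface/module_common_types.h"

    // Hypothetical helper mirroring the conversion step above.
    void FillAudioFrame(const int16_t* interleaved,
                        int sample_rate_hz,
                        int num_channels,
                        int samples_per_channel,
                        webrtc::AudioFrame* out) {
      out->sample_rate_hz_ = sample_rate_hz;            // assumed member names
      out->num_channels_ = num_channels;
      out->samples_per_channel_ = samples_per_channel;
      memcpy(out->data_, interleaved,
             sizeof(int16_t) * samples_per_channel * num_channels);
      out->speech_type_ = webrtc::AudioFrame::kNormalSpeech;   // confirmed above
      out->vad_activity_ = webrtc::AudioFrame::kVadUnknown;    // confirmed above
    }
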
/external/webrtc/src/modules/audio_processing/test/

unit_test.cc
    30  using webrtc::AudioFrame;
    68  webrtc::AudioFrame* frame_;
    69  webrtc::AudioFrame* revframe_;
    92  frame_ = new AudioFrame();
    93  revframe_ = new AudioFrame();
   163  void SetFrameTo(AudioFrame* frame, int16_t value) {
   170  int16_t MaxAudioFrame(const AudioFrame& frame) {
   180  bool FrameDataAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
   265  AudioFrame primary_frame
        [all...]

process_test.cc
    30  using webrtc::AudioFrame;
   500  AudioFrame far_frame;
   501  AudioFrame near_frame;
        [all...]
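The unit test builds its fixtures on heap-allocated AudioFrames and manipulates them with small helpers (SetFrameTo, MaxAudioFrame, FrameDataAreEqual). A sketch of what SetFrameTo plausibly does; only its signature is confirmed by the match, and the old-style member names used here (_payloadData, _payloadDataLengthInSamples, _audioChannel) are assumptions taken from this vintage of webrtc::AudioFrame:

    void SetFrameTo(webrtc::AudioFrame* frame, int16_t value) {
      // Assumed members; only the signature is confirmed by the search hit.
      for (int i = 0;
           i < frame->_payloadDataLengthInSamples * frame->_audioChannel; ++i) {
        frame->_payloadData[i] = value;
      }
    }
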
/external/chromium_org/media/cast/audio_receiver/

audio_decoder.cc
    76  webrtc::AudioFrame webrtc_audio_frame;
    81  if (webrtc_audio_frame.speech_type_ == webrtc::AudioFrame::kPLCCNG ||
    82  webrtc_audio_frame.speech_type_ == webrtc::AudioFrame::kUndefined) {
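After decoding into webrtc_audio_frame, the cast receiver checks speech_type_ and appears to skip frames that carry only packet-loss-concealment/comfort-noise output or undefined data. A sketch of that predicate, assuming some decoder has already filled the frame; only the member and enum names matched above are used:

    #include "third_party/webrtc/modules/interface/module_common_types.h"

    // True if the decoded frame carries real audio rather than PLC/CNG filler.
    bool IsUsableDecodedFrame(const webrtc::AudioFrame& frame) {
      return frame.speech_type_ != webrtc::AudioFrame::kPLCCNG &&
             frame.speech_type_ != webrtc::AudioFrame::kUndefined;
    }
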
/external/webrtc/src/modules/audio_processing/interface/

audio_processing.h
    21  class AudioFrame;
   156  virtual int ProcessStream(AudioFrame* frame) = 0;
   172  virtual int AnalyzeReverseStream(AudioFrame* frame) = 0;
   551  // |AudioFrame| passed to |ProcessStream()|. The |_vadActivity| member will be
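audio_processing.h is the public interface behind the implementations above: AnalyzeReverseStream() takes far-end (render) audio, ProcessStream() processes near-end (capture) audio in place, and the comment at line 551 notes that ProcessStream() rewrites the frame's |_vadActivity| when voice detection is enabled. A usage sketch, assuming the old-style configuration calls (Create(id), set_sample_rate_hz(), set_num_channels(), voice_detection()->Enable()/stream_has_voice()) offered by this vintage of the API; error codes and teardown are ignored for brevity:

    #include "modules/audio_processing/interface/audio_processing.h"
    #include "modules/interface/module_common_types.h"

    void ApmExample(webrtc::AudioFrame* far_frame, webrtc::AudioFrame* near_frame) {
      webrtc::AudioProcessing* apm = webrtc::AudioProcessing::Create(0);
      apm->set_sample_rate_hz(16000);
      apm->set_num_channels(1, 1);
      apm->set_num_reverse_channels(1);
      apm->voice_detection()->Enable(true);

      // Per 10 ms block: far-end audio is only analyzed, near-end audio is
      // processed in place; with voice detection on, ProcessStream() also
      // updates the frame's VAD activity.
      apm->AnalyzeReverseStream(far_frame);
      apm->ProcessStream(near_frame);
      bool has_voice = apm->voice_detection()->stream_has_voice();
      (void)has_voice;
    }
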
/external/chromium_org/third_party/libjingle/source/talk/media/webrtc/

webrtcvoiceengine.h
   188  signal3<uint32, MediaProcessorDirection, AudioFrame*> FrameSignal;

fakewebrtcvoiceengine.h
        [all...]

webrtcvoiceengine.cc
        [all...]
/frameworks/av/media/libeffects/preprocessing/

PreProcessing.cpp
   115  webrtc::AudioFrame *procFrame;  // audio frame passed to webRTC AMP ProcessStream()
   129  webrtc::AudioFrame *revFrame;   // audio frame passed to webRTC AMP AnalyzeReverseStream()
   786  session->procFrame = new webrtc::AudioFrame();
   791  session->revFrame = new webrtc::AudioFrame();
        [all...]
/external/chromium_org/third_party/libjingle/source/talk/

libjingle.gyp
   791  'media/base/audioframe.h',
        [all...]