// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Implementation of AudioInputStream for Windows using Windows Core Audio
// WASAPI for low latency capturing.
//
// Overview of operation:
//
// - An object of WASAPIAudioInputStream is created by the AudioManager
//   factory.
// - Next, some thread will call Open(); at that point the underlying
//   Core Audio APIs are utilized to create two WASAPI interfaces called
//   IAudioClient and IAudioCaptureClient.
// - Then some thread will call Start(sink).
//   A thread called "wasapi_capture_thread" is started and this thread waits
//   on an event that is signaled periodically by the audio engine for each
//   recorded data packet. As a result, data samples will be provided to the
//   registered sink.
// - At some point, a thread will call Stop(), which stops and joins the
//   capture thread and at the same time stops audio streaming.
// - The same thread that called Stop() will call Close(), where we clean up
//   and notify the audio manager, which will likely destroy this object.
//   A typical call sequence is sketched below.
//
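// A minimal usage sketch from the client's point of view. |MySink| is a
// hypothetical AudioInputCallback implementation, and |manager|, |params| and
// |device_id| are assumed to be provided by the surrounding code; in practice
// the stream is created through the AudioManager factory rather than
// constructed directly.
//
//   MySink sink;  // Implements AudioInputCallback (details not shown).
//   WASAPIAudioInputStream* stream =
//       new WASAPIAudioInputStream(manager, params, device_id);
//   if (stream->Open()) {
//     stream->Start(&sink);  // OnData() callbacks now arrive on
//                            // "wasapi_capture_thread".
//     ...                    // Capture for a while.
//     stream->Stop();        // Stops streaming and joins the capture thread.
//   }
//   stream->Close();  // Leads to self-destruction; |stream| must not be
//                     // used after this call.
//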
// Implementation notes:
//
// - The minimum supported client is Windows Vista.
// - This implementation is single-threaded, hence:
//    o Construction and destruction must take place from the same thread.
//    o It is recommended to call all APIs from the same thread as well.
// - It is recommended to first acquire the native audio parameters of the
//   default input device and then use the same sample rate when creating this
//   object. Use WASAPIAudioInputStream::GetInputStreamParameters() to
//   retrieve the native parameters (see the sketch below).
// - Calling Close() also leads to self-destruction.
//
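// A minimal sketch of that recommendation, assuming a |device_id| supplied by
// the caller (the AudioParameters accessors used here are illustrative):
//
//   AudioParameters native_params =
//       WASAPIAudioInputStream::GetInputStreamParameters(device_id);
//   int native_sample_rate = native_params.sample_rate();
//   // Build the AudioParameters passed to the constructor around
//   // |native_sample_rate| to avoid rate conversion in the audio engine.
//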
// Core Audio API details:
//
// - Utilized MMDevice interfaces:
//     o IMMDeviceEnumerator
//     o IMMDevice
// - Utilized WASAPI interfaces:
//     o IAudioClient
//     o IAudioCaptureClient
// - The stream is initialized in shared mode and the processing of the
//   audio buffer is event driven (see the sketch after this list).
// - The Multimedia Class Scheduler service (MMCSS) is utilized to boost
//   the priority of the capture thread.
// - Audio applications that use the MMDevice API and WASAPI typically use
//   the ISimpleAudioVolume interface to manage stream volume levels on a
//   per-session basis. It is also possible to use the IAudioEndpointVolume
//   interface to control the master volume level of an audio endpoint device.
//   This implementation uses the ISimpleAudioVolume interface.
//   MSDN states that "In rare cases, a specialized audio application might
//   require the use of the IAudioEndpointVolume".
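//
// The sketch below illustrates, in heavily simplified form, how these
// interfaces are typically obtained and wired up for shared-mode,
// event-driven capture. It is an approximation under assumptions (error
// handling, device selection by id and loopback handling are omitted), not
// the exact code used by this class; |audio_samples_ready_event| is a local
// auto-reset event created just for the example.
//
//   base::win::ScopedComPtr<IMMDeviceEnumerator> enumerator;
//   enumerator.CreateInstance(__uuidof(MMDeviceEnumerator), NULL,
//                             CLSCTX_INPROC_SERVER);
//
//   base::win::ScopedComPtr<IMMDevice> device;
//   enumerator->GetDefaultAudioEndpoint(eCapture, eConsole, device.Receive());
//
//   base::win::ScopedComPtr<IAudioClient> client;
//   device->Activate(__uuidof(IAudioClient), CLSCTX_INPROC_SERVER, NULL,
//                    client.ReceiveVoid());
//
//   WAVEFORMATEX* mix_format = NULL;
//   client->GetMixFormat(&mix_format);  // Release with CoTaskMemFree().
//
//   HANDLE audio_samples_ready_event = CreateEvent(NULL, FALSE, FALSE, NULL);
//   client->Initialize(AUDCLNT_SHAREMODE_SHARED,
//                      AUDCLNT_STREAMFLAGS_EVENTCALLBACK |
//                          AUDCLNT_STREAMFLAGS_NOPERSIST,
//                      0, 0, mix_format, NULL);
//   client->SetEventHandle(audio_samples_ready_event);
//
//   base::win::ScopedComPtr<IAudioCaptureClient> capture_client;
//   client->GetService(__uuidof(IAudioCaptureClient),
//                      capture_client.ReceiveVoid());
//
//   base::win::ScopedComPtr<ISimpleAudioVolume> audio_volume;
//   client->GetService(__uuidof(ISimpleAudioVolume),
//                      audio_volume.ReceiveVoid());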
//
#ifndef MEDIA_AUDIO_WIN_AUDIO_LOW_LATENCY_INPUT_WIN_H_
#define MEDIA_AUDIO_WIN_AUDIO_LOW_LATENCY_INPUT_WIN_H_

#include <Audioclient.h>
#include <MMDeviceAPI.h>

#include <string>

#include "base/compiler_specific.h"
#include "base/memory/scoped_ptr.h"
#include "base/threading/non_thread_safe.h"
#include "base/threading/platform_thread.h"
#include "base/threading/simple_thread.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_com_initializer.h"
#include "base/win/scoped_comptr.h"
#include "base/win/scoped_handle.h"
#include "media/audio/agc_audio_stream.h"
#include "media/audio/audio_parameters.h"
#include "media/base/media_export.h"

namespace media {

class AudioBus;
class AudioManagerWin;

// AudioInputStream implementation using Windows Core Audio APIs.
class MEDIA_EXPORT WASAPIAudioInputStream
    : public AgcAudioStream<AudioInputStream>,
      public base::DelegateSimpleThread::Delegate,
      NON_EXPORTED_BASE(public base::NonThreadSafe) {
 public:
  // The ctor takes all the usual parameters, plus |manager|, which is the
  // audio manager that is creating this object.
  WASAPIAudioInputStream(AudioManagerWin* manager,
                         const AudioParameters& params,
                         const std::string& device_id);

  // The dtor is typically called by the AudioManager only and it is usually
  // triggered by calling AudioInputStream::Close().
  virtual ~WASAPIAudioInputStream();

  // Implementation of AudioInputStream.
  virtual bool Open() OVERRIDE;
  virtual void Start(AudioInputCallback* callback) OVERRIDE;
  virtual void Stop() OVERRIDE;
  virtual void Close() OVERRIDE;
  virtual double GetMaxVolume() OVERRIDE;
  virtual void SetVolume(double volume) OVERRIDE;
  virtual double GetVolume() OVERRIDE;
  virtual bool IsMuted() OVERRIDE;

  bool started() const { return started_; }

  // Returns the default hardware audio parameters of the specified device.
  static AudioParameters GetInputStreamParameters(const std::string& device_id);

 private:
  // DelegateSimpleThread::Delegate implementation.
  virtual void Run() OVERRIDE;

  // Issues the OnError() callback to the |sink_|.
  void HandleError(HRESULT err);

  // The Open() method is divided into these sub-methods.
  HRESULT SetCaptureDevice();
  HRESULT ActivateCaptureDevice();
  HRESULT GetAudioEngineStreamFormat();
  bool DesiredFormatIsSupported();
  HRESULT InitializeAudioEngine();

  // Retrieves the stream format that the audio engine uses for its internal
  // processing/mixing of shared-mode streams.
  // |effects| is an AudioParameters::effects() flag that will have the
  // DUCKING flag raised only for the default communication device.
  static HRESULT GetMixFormat(const std::string& device_id,
                              WAVEFORMATEX** device_format,
                              int* effects);
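
  // A rough sketch of the central call in GetMixFormat(), assuming |device|
  // is an IMMDevice already resolved from |device_id| (how the DUCKING effect
  // is decided, e.g. by comparing against the default communications capture
  // device, is omitted here):
  //
  //   base::win::ScopedComPtr<IAudioClient> client;
  //   device->Activate(__uuidof(IAudioClient), CLSCTX_INPROC_SERVER, NULL,
  //                    client.ReceiveVoid());
  //   HRESULT hr = client->GetMixFormat(device_format);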

  // Our creator, the audio manager, needs to be notified when we close.
  AudioManagerWin* manager_;

  // Capturing is driven by this thread (which has no message loop).
  // All OnData() callbacks will be called from this thread.
  base::DelegateSimpleThread* capture_thread_;

  // Contains the desired audio format which is set up at construction.
  WAVEFORMATEX format_;

  bool opened_;
  bool started_;

  // Size in bytes of each audio frame (4 bytes for 16-bit stereo PCM).
  size_t frame_size_;

  // Size in audio frames of each audio packet, where an audio packet
  // is defined as the block of data which the user receives in each
  // OnData() callback.
  size_t packet_size_frames_;

  // Size in bytes of each audio packet.
  size_t packet_size_bytes_;

  // Length of the audio endpoint buffer, in audio frames.
  uint32 endpoint_buffer_size_frames_;

  // A copy of the supplied AudioParameters' |effects|.  If ducking was
  // specified (desired device=communications) but we ended up not being
  // able to open the communications device, this flag will be cleared.
  int effects_;

  // Contains the unique name of the selected endpoint device.
  // Note that AudioManagerBase::kDefaultDeviceId represents the default
  // device role and is not a valid ID as such.
  std::string device_id_;

  // Conversion factor used in delay-estimation calculations.
  // Converts a raw performance counter value to 100-nanosecond units.
  double perf_count_to_100ns_units_;

  // Conversion factor used in delay-estimation calculations.
  // Converts from milliseconds to audio frames.
  double ms_to_frame_count_;
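
  // A sketch of how such conversion factors are typically derived (an
  // assumption for illustration, not necessarily the exact implementation);
  // |sample_rate| is the stream's sample rate in Hz:
  //
  //   LARGE_INTEGER performance_frequency;
  //   if (QueryPerformanceFrequency(&performance_frequency)) {
  //     perf_count_to_100ns_units_ =
  //         10000000.0 / static_cast<double>(performance_frequency.QuadPart);
  //   }
  //   ms_to_frame_count_ = static_cast<double>(sample_rate) / 1000.0;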

  // Pointer to the object that will receive the recorded audio samples.
  AudioInputCallback* sink_;

  // Windows Multimedia Device (MMDevice) API interfaces.

  // An IMMDevice interface which represents an audio endpoint device.
  base::win::ScopedComPtr<IMMDevice> endpoint_device_;

  // Windows Audio Session API (WASAPI) interfaces.

  // An IAudioClient interface which enables a client to create and initialize
  // an audio stream between an audio application and the audio engine.
  base::win::ScopedComPtr<IAudioClient> audio_client_;

  // Loopback IAudioClient doesn't support event-driven mode, so a separate
  // IAudioClient is needed to receive notifications when data is available in
  // the buffer. For loopback input |audio_client_| is used to receive data,
  // while |audio_render_client_for_loopback_| is used to get notifications
  // when a new buffer is ready. See comment in InitializeAudioEngine() for
  // details.
  base::win::ScopedComPtr<IAudioClient> audio_render_client_for_loopback_;
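
  // A rough sketch of that dual-client setup, assuming both clients were
  // activated on the render endpoint being looped back (flags and call order
  // are an approximation, not the exact InitializeAudioEngine() code):
  //
  //   // Data path: loopback capture; event-driven mode is not supported.
  //   audio_client_->Initialize(AUDCLNT_SHAREMODE_SHARED,
  //                             AUDCLNT_STREAMFLAGS_LOOPBACK |
  //                                 AUDCLNT_STREAMFLAGS_NOPERSIST,
  //                             0, 0, &format_, NULL);
  //
  //   // Notification path: an event-driven render client whose event is used
  //   // to pace reads from |audio_capture_client_|.
  //   audio_render_client_for_loopback_->Initialize(
  //       AUDCLNT_SHAREMODE_SHARED,
  //       AUDCLNT_STREAMFLAGS_EVENTCALLBACK | AUDCLNT_STREAMFLAGS_NOPERSIST,
  //       0, 0, &format_, NULL);
  //   audio_render_client_for_loopback_->SetEventHandle(
  //       audio_samples_ready_event_.Get());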

  // The IAudioCaptureClient interface enables a client to read input data
  // from a capture endpoint buffer.
  base::win::ScopedComPtr<IAudioCaptureClient> audio_capture_client_;

  // The ISimpleAudioVolume interface enables a client to control the
  // master volume level of an audio session.
  // The volume level is a value in the range 0.0 to 1.0.
  // This interface only works with shared-mode streams.
  base::win::ScopedComPtr<ISimpleAudioVolume> simple_audio_volume_;

  // The audio engine will signal this event each time a buffer has been
  // recorded.
  base::win::ScopedHandle audio_samples_ready_event_;

  // This event will be signaled when capturing shall stop.
  base::win::ScopedHandle stop_capture_event_;
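
  // A simplified sketch of how the capture thread typically waits on these
  // two events and registers with MMCSS (an illustration under assumptions,
  // not the exact Run() implementation):
  //
  //   DWORD task_index = 0;
  //   HANDLE mmcss_handle =
  //       AvSetMmThreadCharacteristics(L"Pro Audio", &task_index);
  //
  //   HANDLE wait_array[] = { stop_capture_event_.Get(),
  //                           audio_samples_ready_event_.Get() };
  //   bool recording = true;
  //   while (recording) {
  //     DWORD result = WaitForMultipleObjects(arraysize(wait_array),
  //                                           wait_array, FALSE, INFINITE);
  //     if (result == WAIT_OBJECT_0) {
  //       recording = false;  // |stop_capture_event_| was signaled.
  //     } else if (result == WAIT_OBJECT_0 + 1) {
  //       // |audio_samples_ready_event_|: drain |audio_capture_client_| and
  //       // deliver the captured data to |sink_|.
  //     }
  //   }
  //
  //   if (mmcss_handle)
  //     AvRevertMmThreadCharacteristics(mmcss_handle);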

  // Extra audio bus used for storage of deinterleaved data for the OnData
  // callback.
  scoped_ptr<media::AudioBus> audio_bus_;

  DISALLOW_COPY_AND_ASSIGN(WASAPIAudioInputStream);
};

}  // namespace media

#endif  // MEDIA_AUDIO_WIN_AUDIO_LOW_LATENCY_INPUT_WIN_H_