// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Utility methods for the Core Audio API on Windows.
// Always ensure that Core Audio is supported before using these methods.
// Use media::CoreAudioUtil::IsSupported() for this purpose.
// Also, all methods must be called on a valid COM thread. This can be done
// by using the base::win::ScopedCOMInitializer helper class.

#ifndef MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_
#define MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_

#include <audioclient.h>
#include <mmdeviceapi.h>
#include <string>

#include "base/basictypes.h"
#include "base/time/time.h"
#include "base/win/scoped_comptr.h"
#include "media/audio/audio_device_name.h"
#include "media/audio/audio_parameters.h"
#include "media/base/media_export.h"

using base::win::ScopedComPtr;

namespace media {

// Represents audio channel configuration constants as understood by Windows.
// E.g. KSAUDIO_SPEAKER_MONO.  For a list of possible values see:
// http://msdn.microsoft.com/en-us/library/windows/hardware/ff537083(v=vs.85).aspx
typedef uint32 ChannelConfig;

class MEDIA_EXPORT CoreAudioUtil {
 public:
  // Returns true if Windows Core Audio is supported.
  // Always verify that this method returns true before using any of the
  // methods in this class.
  // WARNING: This function must be called once from the main thread before
  // it is safe to call from other threads.
  static bool IsSupported();
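  //
  // Example use of IsSupported() (illustrative sketch; assumes the caller
  // initializes COM on this thread with base::win::ScopedCOMInitializer):
  //
  //   base::win::ScopedCOMInitializer com_init;
  //   if (!media::CoreAudioUtil::IsSupported())
  //     return;  // Core Audio is unavailable; fall back to an older API.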

  // Converts a reference time to base::TimeDelta.
  // One reference-time unit is 100 nanoseconds.
  // Example: double s = RefererenceTimeToTimeDelta(t).InMillisecondsF();
  static base::TimeDelta RefererenceTimeToTimeDelta(REFERENCE_TIME time);

  // Returns AUDCLNT_SHAREMODE_EXCLUSIVE if the --enable-exclusive-mode
  // command-line flag is set, and AUDCLNT_SHAREMODE_SHARED (the default)
  // otherwise.
  static AUDCLNT_SHAREMODE GetShareMode();

  // The Windows Multimedia Device (MMDevice) API enables audio clients to
  // discover audio endpoint devices and determine their capabilities.

  // Returns the number of active audio devices in the specified data-flow
  // direction. Set |data_flow| to eAll to retrieve the total number of active
  // audio devices.
  static int NumberOfActiveDevices(EDataFlow data_flow);

  // Creates an IMMDeviceEnumerator interface which provides methods for
  // enumerating audio endpoint devices.
  static ScopedComPtr<IMMDeviceEnumerator> CreateDeviceEnumerator();

  // Creates a default endpoint device that is specified by a data-flow
  // direction and role, e.g. default render device.
  static ScopedComPtr<IMMDevice> CreateDefaultDevice(
      EDataFlow data_flow, ERole role);
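  //
  // Example use of CreateDefaultDevice() (illustrative sketch; assumes Core
  // Audio support has already been verified with IsSupported()):
  //
  //   ScopedComPtr<IMMDevice> device =
  //       CoreAudioUtil::CreateDefaultDevice(eRender, eConsole);
  //   if (!device.get())
  //     return;  // No active default render device exists.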

  // Returns the device id of the default output device or an empty string
  // if no such device exists or if the default device has been disabled.
  static std::string GetDefaultOutputDeviceID();

  // Creates an endpoint device that is specified by a unique endpoint device-
  // identification string.
  static ScopedComPtr<IMMDevice> CreateDevice(const std::string& device_id);

  // Returns the unique ID and user-friendly name of a given endpoint device.
  // Example: "{0.0.1.00000000}.{8db6020f-18e3-4f25-b6f5-7726c9122574}", and
  //          "Microphone (Realtek High Definition Audio)".
  static HRESULT GetDeviceName(IMMDevice* device, AudioDeviceName* name);
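  //
  // Example use of GetDeviceName() (illustrative sketch; |device| is assumed
  // to be a valid IMMDevice obtained from one of the creation helpers above):
  //
  //   AudioDeviceName name;
  //   if (SUCCEEDED(CoreAudioUtil::GetDeviceName(device.get(), &name)))
  //     DVLOG(1) << name.unique_id << " : " << name.device_name;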

  // Returns the device ID/path of the controller (a.k.a. the physical device)
  // that |device| is connected to.  This ID will be the same for all devices
  // from the same controller so it is useful for doing things like determining
  // whether a set of output and input devices belong to the same controller.
  // The device enumerator is required as well as the device itself since we
  // need to inspect the device topology and open up associated devices in
  // order to determine the controller id.
  // If the ID could not be determined for some reason, an empty string is
  // returned.
  static std::string GetAudioControllerID(IMMDevice* device,
      IMMDeviceEnumerator* enumerator);

  // Accepts an id of an input device and finds a matching output device id.
  // If the associated hardware does not have an audio output device (e.g.
  // a webcam with a mic), an empty string is returned.
  static std::string GetMatchingOutputDeviceID(
      const std::string& input_device_id);
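  //
  // Example use of GetMatchingOutputDeviceID() (illustrative sketch;
  // |input_id| is assumed to hold the unique id of a capture device):
  //
  //   std::string output_id =
  //       CoreAudioUtil::GetMatchingOutputDeviceID(input_id);
  //   if (output_id.empty()) {
  //     // The capture hardware has no associated output device.
  //   }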

  // Gets the user-friendly name of the endpoint device which is identified
  // by the unique id in |device_id|.
  static std::string GetFriendlyName(const std::string& device_id);

  // Returns true if the provided unique |device_id| corresponds to the
  // current default device for the given data-flow direction and role.
  static bool DeviceIsDefault(
      EDataFlow flow, ERole role, const std::string& device_id);

  // Query if the audio device is a rendering device or a capture device.
  static EDataFlow GetDataFlow(IMMDevice* device);

  // The Windows Audio Session API (WASAPI) enables client applications to
  // manage the flow of audio data between the application and an audio endpoint
  // device.

  // Create an IAudioClient instance for the default IMMDevice where the
  // flow direction and role are defined by |data_flow| and |role|.
  // The IAudioClient interface enables a client to create and initialize an
  // audio stream between an audio application and the audio engine (for a
  // shared-mode stream) or the hardware buffer of an audio endpoint device
  // (for an exclusive-mode stream).
  static ScopedComPtr<IAudioClient> CreateDefaultClient(EDataFlow data_flow,
                                                        ERole role);
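  //
  // Example use of CreateDefaultClient() (illustrative sketch):
  //
  //   ScopedComPtr<IAudioClient> client =
  //       CoreAudioUtil::CreateDefaultClient(eRender, eConsole);
  //   if (!client.get())
  //     return;  // Activation of the default render device failed.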

  // Create an IAudioClient instance for a specific device _or_ the default
  // device if |device_id| is empty.
  static ScopedComPtr<IAudioClient> CreateClient(const std::string& device_id,
                                                 EDataFlow data_flow,
                                                 ERole role);

  // Create an IAudioClient interface for an existing IMMDevice given by
  // |audio_device|. The flow direction and role are defined by |audio_device|.
  static ScopedComPtr<IAudioClient> CreateClient(IMMDevice* audio_device);

  // Get the mix format that the audio engine uses internally for processing
  // of shared-mode streams. This format is not necessarily a format that the
  // audio endpoint device supports. Thus, the caller might not succeed in
  // creating an exclusive-mode stream with a format obtained by this method.
  static HRESULT GetSharedModeMixFormat(IAudioClient* client,
                                        WAVEFORMATPCMEX* format);

  // Returns true if the specified |client| supports the format in |format|
  // for the given |share_mode| (shared or exclusive).
  static bool IsFormatSupported(IAudioClient* client,
                                AUDCLNT_SHAREMODE share_mode,
                                const WAVEFORMATPCMEX* format);
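  //
  // Example use of GetSharedModeMixFormat() and IsFormatSupported()
  // (illustrative sketch; |client| is assumed to be a valid IAudioClient*):
  //
  //   WAVEFORMATPCMEX format;
  //   if (SUCCEEDED(CoreAudioUtil::GetSharedModeMixFormat(client, &format))) {
  //     bool exclusive_ok = CoreAudioUtil::IsFormatSupported(
  //         client, AUDCLNT_SHAREMODE_EXCLUSIVE, &format);
  //   }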

  // Returns true if the specified |channel_layout| is supported for the
  // default IMMDevice where the flow direction and role are defined by
  // |data_flow| and |role|. If this method returns true for a certain channel
  // layout, it means that SharedModeInitialize() will succeed using a format
  // based on the preferred format where the channel layout has been modified.
  static bool IsChannelLayoutSupported(const std::string& device_id,
                                       EDataFlow data_flow,
                                       ERole role,
                                       ChannelLayout channel_layout);

  // For a shared-mode stream, the audio engine periodically processes the
  // data in the endpoint buffer at the period obtained in |device_period|.
  // For an exclusive mode stream, |device_period| corresponds to the minimum
  // time interval between successive processing by the endpoint device.
  // This period plus the stream latency between the buffer and endpoint device
  // represents the minimum possible latency that an audio application can
  // achieve. The time in |device_period| is expressed in 100-nanosecond units.
  static HRESULT GetDevicePeriod(IAudioClient* client,
                                 AUDCLNT_SHAREMODE share_mode,
                                 REFERENCE_TIME* device_period);
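  //
  // Example use of GetDevicePeriod() (illustrative sketch; |client| is
  // assumed to be a valid IAudioClient*):
  //
  //   REFERENCE_TIME device_period = 0;
  //   if (SUCCEEDED(CoreAudioUtil::GetDevicePeriod(
  //           client, AUDCLNT_SHAREMODE_SHARED, &device_period))) {
  //     double period_ms = CoreAudioUtil::RefererenceTimeToTimeDelta(
  //         device_period).InMillisecondsF();
  //   }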

  // Gets the preferred audio parameters for the specified |client|, for the
  // default device defined by |data_flow| and |role|, or for the device with
  // the unique id given by |device_id|.
  // The acquired values should only be utilized for shared-mode streams since
  // there are no preferred settings for an exclusive-mode stream.
  static HRESULT GetPreferredAudioParameters(IAudioClient* client,
                                             AudioParameters* params);
  static HRESULT GetPreferredAudioParameters(EDataFlow data_flow, ERole role,
                                             AudioParameters* params);
  static HRESULT GetPreferredAudioParameters(const std::string& device_id,
                                             AudioParameters* params);
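  //
  // Example use of GetPreferredAudioParameters() (illustrative sketch):
  //
  //   AudioParameters params;
  //   if (SUCCEEDED(CoreAudioUtil::GetPreferredAudioParameters(
  //           eRender, eConsole, &params))) {
  //     int sample_rate = params.sample_rate();
  //     int buffer_size = params.frames_per_buffer();
  //   }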

  // Retrieves an integer mask which corresponds to the channel layout the
  // audio engine uses for its internal processing/mixing of shared-mode
  // streams. This mask indicates which channels are present in the multi-
  // channel stream. The least significant bit corresponds to the Front Left
  // speaker, the next least significant bit corresponds to the Front Right
  // speaker, and so on, continuing in the order defined in KsMedia.h.
  // See http://msdn.microsoft.com/en-us/library/windows/hardware/ff537083(v=vs.85).aspx
  // for more details.
  // To get the channel config of the default device, pass an empty string
  // for |device_id|.
  static ChannelConfig GetChannelConfig(const std::string& device_id,
                                        EDataFlow data_flow);

  // After activating an IAudioClient interface on an audio endpoint device,
  // the client must initialize it once, and only once, to set up the audio
  // stream between the client and the device. In shared mode, the client
  // connects indirectly through the audio engine which does the mixing.
  // In exclusive mode, the client connects directly to the audio hardware.
  // If a valid event is provided in |event_handle|, the client will be
  // initialized for event-driven buffer handling. If |event_handle| is set to
  // NULL, event-driven buffer handling is not utilized.
  static HRESULT SharedModeInitialize(IAudioClient* client,
                                      const WAVEFORMATPCMEX* format,
                                      HANDLE event_handle,
                                      uint32* endpoint_buffer_size);
  // TODO(henrika): add ExclusiveModeInitialize(...)
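  //
  // Example use of SharedModeInitialize() (illustrative sketch; |client| is
  // assumed to be a valid IAudioClient* and the event handle is owned and
  // closed by the caller):
  //
  //   HANDLE event_handle = CreateEvent(NULL, FALSE, FALSE, NULL);
  //   WAVEFORMATPCMEX format;
  //   uint32 endpoint_buffer_size = 0;
  //   if (SUCCEEDED(CoreAudioUtil::GetSharedModeMixFormat(client, &format)) &&
  //       SUCCEEDED(CoreAudioUtil::SharedModeInitialize(
  //           client, &format, event_handle, &endpoint_buffer_size))) {
  //     // The stream is now initialized for event-driven buffer handling.
  //   }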

  // Create an IAudioRenderClient client for an existing IAudioClient given by
  // |client|. The IAudioRenderClient interface enables a client to write
  // output data to a rendering endpoint buffer.
  static ScopedComPtr<IAudioRenderClient> CreateRenderClient(
      IAudioClient* client);

  // Create an IAudioCaptureClient client for an existing IAudioClient given by
  // |client|. The IAudioCaptureClient interface enables a client to read
  // input data from a capture endpoint buffer.
  static ScopedComPtr<IAudioCaptureClient> CreateCaptureClient(
      IAudioClient* client);

  // Fills up the endpoint rendering buffer with silence for an existing
  // IAudioClient given by |client| and a corresponding IAudioRenderClient
  // given by |render_client|.
  static bool FillRenderEndpointBufferWithSilence(
      IAudioClient* client, IAudioRenderClient* render_client);
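  //
  // Example use of CreateRenderClient() and
  // FillRenderEndpointBufferWithSilence() (illustrative sketch; |client| is
  // assumed to be an initialized IAudioClient*):
  //
  //   ScopedComPtr<IAudioRenderClient> render_client =
  //       CoreAudioUtil::CreateRenderClient(client);
  //   if (render_client.get() &&
  //       CoreAudioUtil::FillRenderEndpointBufferWithSilence(
  //           client, render_client.get())) {
  //     client->Start();  // Start streaming with a silence-primed buffer.
  //   }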

 private:
  CoreAudioUtil() {}
  ~CoreAudioUtil() {}
  DISALLOW_COPY_AND_ASSIGN(CoreAudioUtil);
};

}  // namespace media

#endif  // MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_