// Code-browser navigation residue (not part of the original source):
// Home | History | Annotate | Download | only in win
      1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 // Utility methods for the Core Audio API on Windows.
      6 // Always ensure that Core Audio is supported before using these methods.
      7 // Use media::CoreAudioIsSupported() for this purpose.
      8 // Also, all methods must be called on a valid COM thread. This can be done
      9 // by using the base::win::ScopedCOMInitializer helper class.
     10 
     11 #ifndef MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_
     12 #define MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_
     13 
     14 #include <audioclient.h>
     15 #include <mmdeviceapi.h>
     16 #include <string>
     17 
     18 #include "base/basictypes.h"
     19 #include "base/time/time.h"
     20 #include "base/win/scoped_comptr.h"
     21 #include "media/audio/audio_device_name.h"
     22 #include "media/audio/audio_parameters.h"
     23 #include "media/base/media_export.h"
     24 
     25 using base::win::ScopedComPtr;
     26 
     27 namespace media {
     28 
class MEDIA_EXPORT CoreAudioUtil {
 public:
  // Returns true if Windows Core Audio is supported.
  // Always verify that this method returns true before using any of the
  // methods in this class.
  static bool IsSupported();

  // Converts a REFERENCE_TIME value to a base::TimeDelta.
  // One reference-time unit is 100 nanoseconds.
  // Example: double s = RefererenceTimeToTimeDelta(t).InMillisecondsF();
  // NOTE(review): the method name misspells "Reference"; it is kept as-is
  // here since renaming it would break callers outside this header.
  static base::TimeDelta RefererenceTimeToTimeDelta(REFERENCE_TIME time);

  // Returns AUDCLNT_SHAREMODE_EXCLUSIVE if --enable-exclusive-mode is used
  // as a command-line flag and AUDCLNT_SHAREMODE_SHARED otherwise (default).
  static AUDCLNT_SHAREMODE GetShareMode();

  // The Windows Multimedia Device (MMDevice) API enables audio clients to
  // discover audio endpoint devices and determine their capabilities.

  // Returns the number of active audio devices in the specified data-flow
  // direction. Set |data_flow| to eAll to retrieve the total number of
  // active audio devices.
  static int NumberOfActiveDevices(EDataFlow data_flow);

  // Creates an IMMDeviceEnumerator interface which provides methods for
  // enumerating audio endpoint devices.
  static ScopedComPtr<IMMDeviceEnumerator> CreateDeviceEnumerator();

  // Creates a default endpoint device that is specified by a data-flow
  // direction and role, e.g. the default render device.
  static ScopedComPtr<IMMDevice> CreateDefaultDevice(
      EDataFlow data_flow, ERole role);

  // Creates an endpoint device that is specified by a unique endpoint
  // device-identification string.
  static ScopedComPtr<IMMDevice> CreateDevice(const std::string& device_id);

  // Returns the unique ID and user-friendly name of a given endpoint device.
  // Example: "{0.0.1.00000000}.{8db6020f-18e3-4f25-b6f5-7726c9122574}", and
  //          "Microphone (Realtek High Definition Audio)".
  static HRESULT GetDeviceName(IMMDevice* device, AudioDeviceName* name);

  // Gets the user-friendly name of the endpoint device which is represented
  // by the unique id in |device_id|.
  static std::string GetFriendlyName(const std::string& device_id);

  // Returns true if the provided unique |device_id| corresponds to the
  // current default device for the data-flow direction and role specified
  // by |flow| and |role|.
  static bool DeviceIsDefault(
      EDataFlow flow, ERole role, const std::string& device_id);

  // Queries whether the audio device is a rendering device or a capture
  // device.
  static EDataFlow GetDataFlow(IMMDevice* device);

  // The Windows Audio Session API (WASAPI) enables client applications to
  // manage the flow of audio data between the application and an audio endpoint
  // device.

  // Creates an IAudioClient interface for the default IMMDevice where the
  // flow direction and role are defined by |data_flow| and |role|.
  // The IAudioClient interface enables a client to create and initialize an
  // audio stream between an audio application and the audio engine (for a
  // shared-mode stream) or the hardware buffer of an audio endpoint device
  // (for an exclusive-mode stream).
  static ScopedComPtr<IAudioClient> CreateDefaultClient(EDataFlow data_flow,
                                                        ERole role);

  // Creates an IAudioClient interface for an existing IMMDevice given by
  // |audio_device|. The flow direction and role are defined by
  // |audio_device|.
  static ScopedComPtr<IAudioClient> CreateClient(IMMDevice* audio_device);

  // Gets the mix format that the audio engine uses internally for processing
  // of shared-mode streams. This format is not necessarily a format that the
  // audio endpoint device supports. Thus, the caller might not succeed in
  // creating an exclusive-mode stream with a format obtained by this method.
  static HRESULT GetSharedModeMixFormat(IAudioClient* client,
                                        WAVEFORMATPCMEX* format);

  // Gets the mix format that the audio engine uses internally for processing
  // of shared-mode streams using the default IMMDevice where the flow
  // direction and role are defined by |data_flow| and |role|.
  static HRESULT GetDefaultSharedModeMixFormat(EDataFlow data_flow,
                                               ERole role,
                                               WAVEFORMATPCMEX* format);

  // Returns true if the specified |client| supports the format in |format|
  // for the given |share_mode| (shared or exclusive).
  static bool IsFormatSupported(IAudioClient* client,
                                AUDCLNT_SHAREMODE share_mode,
                                const WAVEFORMATPCMEX* format);

  // Returns true if the specified |channel_layout| is supported for the
  // default IMMDevice where the flow direction and role are defined by
  // |data_flow| and |role|. If this method returns true for a certain
  // channel layout, it means that SharedModeInitialize() will succeed using
  // a format based on the preferred format where the channel layout has been
  // modified.
  static bool IsChannelLayoutSupported(EDataFlow data_flow, ERole role,
                                       ChannelLayout channel_layout);

  // For a shared-mode stream, the audio engine periodically processes the
  // data in the endpoint buffer at the period obtained in |device_period|.
  // For an exclusive-mode stream, |device_period| corresponds to the minimum
  // time interval between successive processing passes by the endpoint
  // device. This period plus the stream latency between the buffer and the
  // endpoint device represents the minimum possible latency that an audio
  // application can achieve. The time in |device_period| is expressed in
  // 100-nanosecond units.
  static HRESULT GetDevicePeriod(IAudioClient* client,
                                 AUDCLNT_SHAREMODE share_mode,
                                 REFERENCE_TIME* device_period);

  // Gets the preferred audio parameters for the specified |client|, for the
  // default device whose direction and role are defined by |data_flow| and
  // |role|, or for the device with the unique id given by |device_id|.
  // The acquired values should only be utilized for shared-mode streams
  // since there are no preferred settings for an exclusive-mode stream.
  static HRESULT GetPreferredAudioParameters(IAudioClient* client,
                                             AudioParameters* params);
  static HRESULT GetPreferredAudioParameters(EDataFlow data_flow, ERole role,
                                             AudioParameters* params);
  static HRESULT GetPreferredAudioParameters(const std::string& device_id,
                                             AudioParameters* params);

  // After activating an IAudioClient interface on an audio endpoint device,
  // the client must initialize it once, and only once, to initialize the
  // audio stream between the client and the device. In shared mode, the
  // client connects indirectly through the audio engine which does the
  // mixing. In exclusive mode, the client connects directly to the audio
  // hardware. If a valid event is provided in |event_handle|, the client
  // will be initialized for event-driven buffer handling. If |event_handle|
  // is set to NULL, event-driven buffer handling is not utilized.
  // On success, the size of the endpoint buffer is returned in
  // |endpoint_buffer_size| (presumably in audio frames, matching
  // IAudioClient::GetBufferSize — confirm against the implementation).
  static HRESULT SharedModeInitialize(IAudioClient* client,
                                      const WAVEFORMATPCMEX* format,
                                      HANDLE event_handle,
                                      uint32* endpoint_buffer_size);
  // TODO(henrika): add ExclusiveModeInitialize(...)

  // Creates an IAudioRenderClient client for an existing IAudioClient given
  // by |client|. The IAudioRenderClient interface enables a client to write
  // output data to a rendering endpoint buffer.
  static ScopedComPtr<IAudioRenderClient> CreateRenderClient(
      IAudioClient* client);

  // Creates an IAudioCaptureClient client for an existing IAudioClient given
  // by |client|. The IAudioCaptureClient interface enables a client to read
  // input data from a capture endpoint buffer.
  static ScopedComPtr<IAudioCaptureClient> CreateCaptureClient(
      IAudioClient* client);

  // Fills up the endpoint rendering buffer with silence for an existing
  // IAudioClient given by |client| and a corresponding IAudioRenderClient
  // given by |render_client|.
  static bool FillRenderEndpointBufferWithSilence(
      IAudioClient* client, IAudioRenderClient* render_client);

 private:
  // Pure static-utility class: instantiation and copying are disallowed.
  CoreAudioUtil() {}
  ~CoreAudioUtil() {}
  DISALLOW_COPY_AND_ASSIGN(CoreAudioUtil);
};
    188 
    189 }  // namespace media
    190 
    191 #endif  // MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_
    192