// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_
#define CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_
      7 
      8 #include "base/memory/ref_counted.h"
      9 #include "base/synchronization/lock.h"
     10 #include "base/threading/non_thread_safe.h"
     11 #include "base/threading/thread_checker.h"
     12 #include "content/renderer/media/media_stream_audio_renderer.h"
     13 #include "content/renderer/media/webrtc_audio_device_impl.h"
     14 #include "media/base/audio_decoder.h"
     15 #include "media/base/audio_pull_fifo.h"
     16 #include "media/base/audio_renderer_sink.h"
     17 #include "media/base/channel_layout.h"

namespace media {
class AudioOutputDevice;
}  // namespace media

namespace webrtc {
class AudioSourceInterface;
class MediaStreamInterface;
}  // namespace webrtc

namespace content {

class WebRtcAudioRendererSource;

// This renderer handles calls from the pipeline and the WebRTC ADM (audio
// device module).  It is used to connect a WebRTC MediaStream to the audio
// pipeline.
class CONTENT_EXPORT WebRtcAudioRenderer
    : NON_EXPORTED_BASE(public media::AudioRendererSink::RenderCallback),
      NON_EXPORTED_BASE(public MediaStreamAudioRenderer) {
 public:
  // A small utility class that holds the configured state of an audio
  // stream.  It is used by both WebRtcAudioRenderer and SharedAudioRenderer
  // (see the cc file), partly to avoid code duplication and partly to make
  // sure both classes track the state in the same way.
  class PlayingState : public base::NonThreadSafe {
   public:
    PlayingState() : playing_(false), volume_(1.0f) {}

    bool playing() const {
      DCHECK(CalledOnValidThread());
      return playing_;
    }

    void set_playing(bool playing) {
      DCHECK(CalledOnValidThread());
      playing_ = playing;
    }

    float volume() const {
      DCHECK(CalledOnValidThread());
      return volume_;
    }

    void set_volume(float volume) {
      DCHECK(CalledOnValidThread());
      volume_ = volume;
    }

   private:
    bool playing_;
    float volume_;
  };

  WebRtcAudioRenderer(
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
      int source_render_view_id,
      int source_render_frame_id,
      int session_id,
      int sample_rate,
      int frames_per_buffer);

  // Initialize function called by clients like WebRtcAudioDeviceImpl.
  // Stop() has to be called before |source| is deleted.
  bool Initialize(WebRtcAudioRendererSource* source);

  // When sharing a single instance of WebRtcAudioRenderer between multiple
  // users (e.g. WebMediaPlayerMS), call this method to create a proxy object
  // that maintains the Play and Stop states per caller.
  // The proxy ensures that Play() isn't forwarded when the caller's state is
  // already "playing", that Pause() isn't forwarded when the state is already
  // "paused", and it tracks the state for Stop() in the same way.
  // When Stop() is called, or when the proxy goes out of scope, the proxy
  // ensures that Pause() is called followed by a call to Stop(), which is the
  // usage pattern that WebRtcAudioRenderer requires.  See the usage sketch
  // below.
  scoped_refptr<MediaStreamAudioRenderer> CreateSharedAudioRendererProxy(
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream);
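
  // A minimal usage sketch (illustrative only, not part of the API contract;
  // it assumes a |source| that outlives this renderer and a |stream| obtained
  // from a remote peer connection, and the variable names are hypothetical):
  //
  //   scoped_refptr<WebRtcAudioRenderer> renderer = new WebRtcAudioRenderer(
  //       stream, render_view_id, render_frame_id, session_id,
  //       sample_rate, frames_per_buffer);
  //   if (renderer->Initialize(source)) {
  //     scoped_refptr<MediaStreamAudioRenderer> proxy =
  //         renderer->CreateSharedAudioRendererProxy(stream);
  //     proxy->Start();
  //     proxy->Play();   // Forwarded only when not already playing.
  //     ...
  //     proxy->Stop();   // Ensures Pause() then Stop() when last user stops.
  //   }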

  // Used to DCHECK on the expected state.
  bool IsStarted() const;

  // Accessors to the sink audio parameters.
  int channels() const { return sink_params_.channels(); }
  int sample_rate() const { return sink_params_.sample_rate(); }
  int frames_per_buffer() const { return sink_params_.frames_per_buffer(); }

 private:
  // MediaStreamAudioRenderer implementation.  This is private since we want
  // callers to use proxy objects.
  // TODO(tommi): Make the MediaStreamAudioRenderer implementation a pimpl?
  virtual void Start() OVERRIDE;
  virtual void Play() OVERRIDE;
  virtual void Pause() OVERRIDE;
  virtual void Stop() OVERRIDE;
  virtual void SetVolume(float volume) OVERRIDE;
  virtual base::TimeDelta GetCurrentRenderTime() const OVERRIDE;
  virtual bool IsLocalRenderer() const OVERRIDE;

  // Called when an audio renderer, either the main or a proxy, starts playing.
  // Here we maintain a reference count of how many renderers are currently
  // playing so that the shared play state of all the streams can be reflected
  // correctly.
  void EnterPlayState();

  // Called when an audio renderer, either the main or a proxy, is paused.
  // See EnterPlayState for more details.
  void EnterPauseState();
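
  // A sketch of the reference counting these two methods perform (hypothetical
  // and illustrative only; the actual logic lives in the cc file):
  //
  //   void WebRtcAudioRenderer::EnterPlayState() {
  //     base::AutoLock auto_lock(lock_);
  //     if (++play_ref_count_ == 1)
  //       state_ = PLAYING;  // First playing renderer starts playback.
  //   }
  //
  //   void WebRtcAudioRenderer::EnterPauseState() {
  //     base::AutoLock auto_lock(lock_);
  //     DCHECK_GT(play_ref_count_, 0);
  //     if (--play_ref_count_ == 0)
  //       state_ = PAUSED;  // Last playing renderer pauses playback.
  //   }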

 protected:
  virtual ~WebRtcAudioRenderer();

 private:
  enum State {
    UNINITIALIZED,
    PLAYING,
    PAUSED,
  };

  // Holds raw pointers to PlayingState objects.  Ownership is managed outside
  // of this type.
  typedef std::vector<PlayingState*> PlayingStates;
  // Maps an audio source to a list of playing states that collectively hold
  // volume information for that source.
  typedef std::map<webrtc::AudioSourceInterface*, PlayingStates>
      SourcePlayingStates;

  // Used to DCHECK that we are called on the correct thread.
  base::ThreadChecker thread_checker_;

  // Keeps track of the state of the renderer.
  State state_;

  // media::AudioRendererSink::RenderCallback implementation.
  // These two methods are called on the AudioOutputDevice worker thread.
  virtual int Render(media::AudioBus* audio_bus,
                     int audio_delay_milliseconds) OVERRIDE;
  virtual void OnRenderError() OVERRIDE;

  // Called by AudioPullFifo when more data is necessary.
  // This method is called on the AudioOutputDevice worker thread.
  void SourceCallback(int fifo_frame_delay, media::AudioBus* audio_bus);
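
  // A sketch of how Render() and the FIFO interact when the sink buffer size
  // differs from the WebRTC client buffer size (hypothetical and illustrative
  // only; the actual logic lives in the cc file):
  //
  //   int WebRtcAudioRenderer::Render(media::AudioBus* audio_bus,
  //                                   int audio_delay_milliseconds) {
  //     base::AutoLock auto_lock(lock_);
  //     audio_delay_milliseconds_ = audio_delay_milliseconds;
  //     if (audio_fifo_) {
  //       // Consume() invokes SourceCallback() as many times as needed to
  //       // fill |audio_bus| using the client's buffer size.
  //       audio_fifo_->Consume(audio_bus, audio_bus->frames());
  //     } else {
  //       SourceCallback(0, audio_bus);
  //     }
  //     return (state_ == PLAYING) ? audio_bus->frames() : 0;
  //   }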

  // Goes through all renderers for the |source| and applies the proper
  // volume scaling for the source based on the volume(s) of the renderer(s).
  void UpdateSourceVolume(webrtc::AudioSourceInterface* source);
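
  // A sketch of the aggregation UpdateSourceVolume() is expected to perform
  // (hypothetical and illustrative only; it assumes the collective volume is
  // the sum of the volumes of all currently playing states for the source):
  //
  //   float volume = 0.0f;
  //   SourcePlayingStates::iterator entry =
  //       source_playing_states_.find(source);
  //   if (entry != source_playing_states_.end()) {
  //     for (size_t i = 0; i < entry->second.size(); ++i) {
  //       if (entry->second[i]->playing())
  //         volume += entry->second[i]->volume();
  //     }
  //   }
  //   source->SetVolume(volume);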

  // Tracks a playing state.  The state must be playing when this method
  // is called.
  // Returns true if the state was added, false if it was already being
  // tracked.
  bool AddPlayingState(webrtc::AudioSourceInterface* source,
                       PlayingState* state);
  // Removes a playing state for an audio source.
  // Returns true if the state was removed from the internal map, false if
  // it had already been removed or if the source isn't being rendered.
  bool RemovePlayingState(webrtc::AudioSourceInterface* source,
                          PlayingState* state);

  // Called whenever the Play/Pause state of any of the renderers changes, or
  // whenever the volume of any of them changes.
  // Here we update the shared Play state and apply volume scaling to all audio
  // sources associated with the |media_stream| based on the collective volume
  // of playing renderers.
  void OnPlayStateChanged(
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
      PlayingState* state);

  // The render view and frame in which the audio is rendered into |sink_|.
  const int source_render_view_id_;
  const int source_render_frame_id_;
  const int session_id_;

  // The sink (destination) for rendered audio.
  scoped_refptr<media::AudioOutputDevice> sink_;

  // The media stream that holds the audio tracks that this renderer renders.
  const scoped_refptr<webrtc::MediaStreamInterface> media_stream_;

  // Audio data source from the browser process.
  WebRtcAudioRendererSource* source_;

  // Protects access to |state_|, |source_|, |sink_| and |current_time_|.
  mutable base::Lock lock_;

  // Ref count for the MediaPlayers which are playing audio.
  int play_ref_count_;

  // Ref count for the MediaPlayers which have called Start() but not Stop().
  int start_ref_count_;

  // Used to buffer data between the client and the output device in cases
  // where the client buffer size is not the same as the output device buffer
  // size.
  scoped_ptr<media::AudioPullFifo> audio_fifo_;

  // Contains the accumulated delay estimate which is provided to the WebRTC
  // AEC.
  int audio_delay_milliseconds_;

  // Delay due to the FIFO in milliseconds.
  int fifo_delay_milliseconds_;

  base::TimeDelta current_time_;

  // Saved volume and playing state of the root renderer.
  PlayingState playing_state_;

  // Audio params used by the sink of the renderer.
  media::AudioParameters sink_params_;

  // Maps audio sources to a list of active audio renderers.
  // Pointers to PlayingState objects are kept in this map only while the
  // associated renderer is actually playing the stream.  Ownership of the
  // state objects lies with the renderers; a renderer must leave the playing
  // state before its PlayingState object is destroyed (goes out of scope).
  SourcePlayingStates source_playing_states_;

  // Used for triggering a new UMA histogram sample.  Counts the number of
  // render callbacks modulo |kNumCallbacksBetweenRenderTimeHistograms|.
  int render_callback_count_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioRenderer);
};

}  // namespace content

#endif  // CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_