// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/renderer/media/cast_rtp_stream.h"

#include "base/bind.h"
#include "base/debug/trace_event.h"
#include "base/logging.h"
#include "base/memory/weak_ptr.h"
#include "base/strings/stringprintf.h"
#include "base/sys_info.h"
#include "chrome/renderer/media/cast_session.h"
#include "chrome/renderer/media/cast_udp_transport.h"
#include "content/public/renderer/media_stream_audio_sink.h"
#include "content/public/renderer/media_stream_video_sink.h"
#include "content/public/renderer/render_thread.h"
#include "content/public/renderer/video_encode_accelerator.h"
#include "media/audio/audio_parameters.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_fifo.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/multi_channel_resampler.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_defines.h"
#include "media/cast/cast_sender.h"
#include "media/cast/net/cast_transport_config.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "ui/gfx/geometry/size.h"

using media::cast::AudioSenderConfig;
using media::cast::VideoSenderConfig;

namespace {

const char kCodecNameOpus[] = "OPUS";
const char kCodecNameVp8[] = "VP8";
const char kCodecNameH264[] = "H264";

// To convert from kilobits per second to bits per second.
const int kBitrateMultiplier = 1000;

// The number of sets of audio data to buffer in the FIFO. If the input and
// output sample rates differ, buffering is necessary to avoid audio glitches.
// See CastAudioSink::ResampleData() and CastAudioSink::OnSetFormat()
// for more details.
const int kBufferAudioData = 2;
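// With this value the FIFO holds two input buffers, and one buffer is
// prerolled before resampling begins (see CastAudioSink::ResampleData()).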

CastRtpPayloadParams DefaultOpusPayload() {
  CastRtpPayloadParams payload;
  payload.payload_type = 127;
  payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
  payload.ssrc = 1;
  payload.feedback_ssrc = 2;
  payload.clock_rate = media::cast::kDefaultAudioSamplingRate;
  // kDefaultAudioEncoderBitrate is 0, which means VBR.
  payload.min_bitrate = payload.max_bitrate =
      media::cast::kDefaultAudioEncoderBitrate;
  payload.channels = 2;
  payload.max_frame_rate = 100;  // 10 ms audio frames
  payload.codec_name = kCodecNameOpus;
  return payload;
}

CastRtpPayloadParams DefaultVp8Payload() {
  CastRtpPayloadParams payload;
  payload.payload_type = 96;
  payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
  payload.ssrc = 11;
  payload.feedback_ssrc = 12;
  payload.clock_rate = media::cast::kVideoFrequency;
  payload.max_bitrate = 2000;
  payload.min_bitrate = 50;
  payload.channels = 1;
  payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
  payload.width = 1280;
  payload.height = 720;
  payload.codec_name = kCodecNameVp8;
  return payload;
}

CastRtpPayloadParams DefaultH264Payload() {
  CastRtpPayloadParams payload;
  // TODO(hshi): set different ssrc/rtpPayloadType values for H264 and VP8
  // once b/13696137 is fixed.
  payload.payload_type = 96;
  payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
  payload.ssrc = 11;
  payload.feedback_ssrc = 12;
  payload.clock_rate = media::cast::kVideoFrequency;
  payload.max_bitrate = 2000;
  payload.min_bitrate = 50;
  payload.channels = 1;
  payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
  payload.width = 1280;
  payload.height = 720;
  payload.codec_name = kCodecNameH264;
  return payload;
}

bool IsHardwareVP8EncodingSupported() {
  // Query for hardware VP8 encoder support.
  std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
      content::GetSupportedVideoEncodeAcceleratorProfiles();
  for (size_t i = 0; i < vea_profiles.size(); ++i) {
    if (vea_profiles[i].profile >= media::VP8PROFILE_MIN &&
        vea_profiles[i].profile <= media::VP8PROFILE_MAX) {
      return true;
    }
  }
  return false;
}

bool IsHardwareH264EncodingSupported() {
  // Query for hardware H.264 encoder support.
  std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
      content::GetSupportedVideoEncodeAcceleratorProfiles();
  for (size_t i = 0; i < vea_profiles.size(); ++i) {
    if (vea_profiles[i].profile >= media::H264PROFILE_MIN &&
        vea_profiles[i].profile <= media::H264PROFILE_MAX) {
      return true;
    }
  }
  return false;
}

int NumberOfEncodeThreads() {
  // Do not saturate the system with encoding work: leave CPU cycles free for
  // capturing. On a lower-end system with only one or two cores, use a single
  // thread for encoding.
  if (base::SysInfo::NumberOfProcessors() <= 2)
    return 1;

  // On higher-end systems, use two encoding threads to reduce latency.
  // In theory a physical CPU core has at most two hyperthreads, so having
  // three or more logical processors means the system has at least two
  // physical cores.
  return 2;
}

std::vector<CastRtpParams> SupportedAudioParams() {
  // TODO(hclam): Fill in more codecs here.
  std::vector<CastRtpParams> supported_params;
  supported_params.push_back(CastRtpParams(DefaultOpusPayload()));
  return supported_params;
}

std::vector<CastRtpParams> SupportedVideoParams() {
  std::vector<CastRtpParams> supported_params;
  if (IsHardwareH264EncodingSupported())
    supported_params.push_back(CastRtpParams(DefaultH264Payload()));
  supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
  return supported_params;
}

bool ToAudioSenderConfig(const CastRtpParams& params,
                         AudioSenderConfig* config) {
  config->ssrc = params.payload.ssrc;
  config->incoming_feedback_ssrc = params.payload.feedback_ssrc;
  if (config->ssrc == config->incoming_feedback_ssrc)
    return false;
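  // Use the configured minimum latency if one is given; otherwise collapse
  // the playout window to the maximum latency.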
  config->min_playout_delay =
      base::TimeDelta::FromMilliseconds(
          params.payload.min_latency_ms ?
          params.payload.min_latency_ms :
          params.payload.max_latency_ms);
  config->max_playout_delay =
      base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
  if (config->min_playout_delay <= base::TimeDelta())
    return false;
  if (config->min_playout_delay > config->max_playout_delay)
    return false;
  config->rtp_payload_type = params.payload.payload_type;
  config->use_external_encoder = false;
  config->frequency = params.payload.clock_rate;
  if (config->frequency < 8000)
    return false;
  config->channels = params.payload.channels;
  if (config->channels < 1)
    return false;
  config->bitrate = params.payload.max_bitrate * kBitrateMultiplier;
  if (params.payload.codec_name == kCodecNameOpus)
    config->codec = media::cast::CODEC_AUDIO_OPUS;
  else
    return false;
  config->aes_key = params.payload.aes_key;
  config->aes_iv_mask = params.payload.aes_iv_mask;
  return true;
}

bool ToVideoSenderConfig(const CastRtpParams& params,
                         VideoSenderConfig* config) {
  config->ssrc = params.payload.ssrc;
  config->incoming_feedback_ssrc = params.payload.feedback_ssrc;
  if (config->ssrc == config->incoming_feedback_ssrc)
    return false;
  config->min_playout_delay =
      base::TimeDelta::FromMilliseconds(
          params.payload.min_latency_ms ?
          params.payload.min_latency_ms :
          params.payload.max_latency_ms);
  config->max_playout_delay =
      base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
  if (config->min_playout_delay <= base::TimeDelta())
    return false;
  if (config->min_playout_delay > config->max_playout_delay)
    return false;
  config->rtp_payload_type = params.payload.payload_type;
  config->width = params.payload.width;
  config->height = params.payload.height;
  if (config->width < 2 || config->height < 2)
    return false;
  config->min_bitrate = config->start_bitrate =
      params.payload.min_bitrate * kBitrateMultiplier;
  config->max_bitrate = params.payload.max_bitrate * kBitrateMultiplier;
  if (config->min_bitrate > config->max_bitrate)
    return false;
  config->start_bitrate = config->min_bitrate;
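  // Round the requested maximum frame rate to the nearest integer, with a
  // floor of 1 fps.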
  config->max_frame_rate = static_cast<int>(
      std::max(1.0, params.payload.max_frame_rate) + 0.5);
  if (config->max_frame_rate > 120)
    return false;
  if (params.payload.codec_name == kCodecNameVp8) {
    config->use_external_encoder = IsHardwareVP8EncodingSupported();
    config->codec = media::cast::CODEC_VIDEO_VP8;
  } else if (params.payload.codec_name == kCodecNameH264) {
    config->use_external_encoder = IsHardwareH264EncodingSupported();
    config->codec = media::cast::CODEC_VIDEO_H264;
  } else {
    return false;
  }
  if (!config->use_external_encoder) {
    config->number_of_encode_threads = NumberOfEncodeThreads();
  }
  config->aes_key = params.payload.aes_key;
  config->aes_iv_mask = params.payload.aes_iv_mask;
  return true;
}

}  // namespace

// This class receives MediaStreamTrack events and video frames from a
// MediaStreamTrack.
//
// Threading: Video frames are received on the IO thread and then
// forwarded to media::cast::VideoFrameInput through a static method.
// Member variables of this class are only accessed on the render thread.
class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
                      public content::MediaStreamVideoSink {
 public:
  // |track| provides data for this sink.
  // |expected_natural_size| is the expected dimensions of the video frame.
  // |error_callback| is called if video formats don't match.
  CastVideoSink(const blink::WebMediaStreamTrack& track,
                const gfx::Size& expected_natural_size,
                const CastRtpStream::ErrorCallback& error_callback)
      : track_(track),
        sink_added_(false),
        expected_natural_size_(expected_natural_size),
        error_callback_(error_callback) {}

  virtual ~CastVideoSink() {
    if (sink_added_)
      RemoveFromVideoTrack(this, track_);
  }

  // This static method is used to forward video frames to |frame_input|.
  static void OnVideoFrame(
      // These parameters are already bound when the callback is created.
      const gfx::Size& expected_natural_size,
      const CastRtpStream::ErrorCallback& error_callback,
      const scoped_refptr<media::cast::VideoFrameInput> frame_input,
      // These parameters are passed for each frame.
      const scoped_refptr<media::VideoFrame>& frame,
      const media::VideoCaptureFormat& format,
      const base::TimeTicks& estimated_capture_time) {
    if (frame->natural_size() != expected_natural_size) {
      error_callback.Run(
          base::StringPrintf("Video frame resolution does not match config."
                             " Expected %dx%d. Got %dx%d.",
                             expected_natural_size.width(),
                             expected_natural_size.height(),
                             frame->natural_size().width(),
                             frame->natural_size().height()));
      return;
    }

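    // Use the capture time reported by the source when it is available;
    // otherwise fall back to the current time.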
    base::TimeTicks timestamp;
    if (estimated_capture_time.is_null())
      timestamp = base::TimeTicks::Now();
    else
      timestamp = estimated_capture_time;

    // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
    TRACE_EVENT_INSTANT2(
        "cast_perf_test", "MediaStreamVideoSink::OnVideoFrame",
        TRACE_EVENT_SCOPE_THREAD,
        "timestamp",  timestamp.ToInternalValue(),
        "time_delta", frame->timestamp().ToInternalValue());
    frame_input->InsertRawVideoFrame(frame, timestamp);
  }

  // Attach this sink to the video track represented by |track_|.
  // Data received from the track will be submitted to |frame_input|.
  void AddToTrack(
      const scoped_refptr<media::cast::VideoFrameInput>& frame_input) {
    DCHECK(!sink_added_);
    sink_added_ = true;
    AddToVideoTrack(
        this,
        base::Bind(
            &CastVideoSink::OnVideoFrame,
            expected_natural_size_,
            error_callback_,
            frame_input),
        track_);
  }

 private:
  blink::WebMediaStreamTrack track_;
  bool sink_added_;
  gfx::Size expected_natural_size_;
  CastRtpStream::ErrorCallback error_callback_;

  DISALLOW_COPY_AND_ASSIGN(CastVideoSink);
};

// Receives audio data from a MediaStreamTrack. Data is submitted to
// media::cast::AudioFrameInput.
//
// Threading: Audio frames are received on the real-time audio thread.
// Note that RemoveFromAudioTrack() is synchronous, and we are guaranteed
// that there will be no more audio data after calling it.
class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
                      public content::MediaStreamAudioSink {
 public:
  // |track| provides data for this sink.
  // |error_callback| is called if audio formats don't match.
  CastAudioSink(const blink::WebMediaStreamTrack& track,
                const CastRtpStream::ErrorCallback& error_callback,
                int output_channels,
                int output_sample_rate)
      : track_(track),
        sink_added_(false),
        error_callback_(error_callback),
        weak_factory_(this),
        output_channels_(output_channels),
        output_sample_rate_(output_sample_rate),
        input_preroll_(0) {}

  virtual ~CastAudioSink() {
    if (sink_added_)
      RemoveFromAudioTrack(this, track_);
  }

  // Called on the real-time audio thread.
  // content::MediaStreamAudioSink implementation.
  virtual void OnData(const int16* audio_data,
                      int sample_rate,
                      int number_of_channels,
                      int number_of_frames) OVERRIDE {
    scoped_ptr<media::AudioBus> input_bus;
    if (resampler_) {
      input_bus = ResampleData(
          audio_data, sample_rate, number_of_channels, number_of_frames);
      if (!input_bus)
        return;
    } else {
      input_bus = media::AudioBus::Create(
          number_of_channels, number_of_frames);
      input_bus->FromInterleaved(
          audio_data, number_of_frames, number_of_channels);
    }

    // TODO(hclam): Pass in the accurate capture time to have good
    // audio / video sync.
    frame_input_->InsertAudio(input_bus.Pass(), base::TimeTicks::Now());
  }

  // Returns resampled audio data from the input. This is called when the
  // input sample rate doesn't match the output.
  // The flow of data is as follows:
  // |audio_data| ->
  //     AudioFifo |fifo_| ->
  //         MultiChannelResampler |resampler_|.
  //
  // The resampler pulls data out of the FIFO and resamples it. It might
  // pull from |fifo_| more than once, but no more than |kBufferAudioData|
  // times. We preroll audio data into the FIFO to make sure there's enough
  // data for resampling.
  scoped_ptr<media::AudioBus> ResampleData(
      const int16* audio_data,
      int sample_rate,
      int number_of_channels,
      int number_of_frames) {
    DCHECK_EQ(number_of_channels, output_channels_);
    fifo_input_bus_->FromInterleaved(
        audio_data, number_of_frames, number_of_channels);
    fifo_->Push(fifo_input_bus_.get());

    if (input_preroll_ < kBufferAudioData - 1) {
      ++input_preroll_;
      return scoped_ptr<media::AudioBus>();
    }

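    // The output bus carries the same duration of audio as one input buffer:
    // output frames = input frames * output sample rate / input sample rate.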
    scoped_ptr<media::AudioBus> output_bus(
        media::AudioBus::Create(
            output_channels_,
            output_sample_rate_ * fifo_input_bus_->frames() / sample_rate));

    // The resampler will then call ProvideData() below to fetch data from
    // |fifo_|.
    resampler_->Resample(output_bus->frames(), output_bus.get());
    return output_bus.Pass();
  }

  // Called on the real-time audio thread.
  virtual void OnSetFormat(const media::AudioParameters& params) OVERRIDE {
    if (params.sample_rate() == output_sample_rate_)
      return;
    fifo_.reset(new media::AudioFifo(
        output_channels_,
        kBufferAudioData * params.frames_per_buffer()));
    fifo_input_bus_ = media::AudioBus::Create(
        params.channels(), params.frames_per_buffer());
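    // The resampler is constructed with the ratio of the input sample rate to
    // the output sample rate; it pulls input from the FIFO via ProvideData().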
    resampler_.reset(new media::MultiChannelResampler(
        output_channels_,
        static_cast<double>(params.sample_rate()) / output_sample_rate_,
        params.frames_per_buffer(),
        base::Bind(&CastAudioSink::ProvideData, base::Unretained(this))));
  }

  // Add this sink to the track. Data received from the track will be
  // submitted to |frame_input|.
  void AddToTrack(
      const scoped_refptr<media::cast::AudioFrameInput>& frame_input) {
    DCHECK(!sink_added_);
    sink_added_ = true;

    // This member is written here and then accessed on the IO thread.
    // We will not get data until AddToAudioTrack is called, so it is
    // safe to access this member now.
    frame_input_ = frame_input;
    AddToAudioTrack(this, track_);
  }

  void ProvideData(int frame_delay, media::AudioBus* output_bus) {
    fifo_->Consume(output_bus, 0, output_bus->frames());
  }

 private:
  blink::WebMediaStreamTrack track_;
  bool sink_added_;
  CastRtpStream::ErrorCallback error_callback_;
  base::WeakPtrFactory<CastAudioSink> weak_factory_;

  const int output_channels_;
  const int output_sample_rate_;

  // These members are accessed on the real-time audio thread only.
  scoped_refptr<media::cast::AudioFrameInput> frame_input_;
  scoped_ptr<media::MultiChannelResampler> resampler_;
  scoped_ptr<media::AudioFifo> fifo_;
  scoped_ptr<media::AudioBus> fifo_input_bus_;
  int input_preroll_;

  DISALLOW_COPY_AND_ASSIGN(CastAudioSink);
};

CastRtpParams::CastRtpParams(const CastRtpPayloadParams& payload_params)
    : payload(payload_params) {}

CastCodecSpecificParams::CastCodecSpecificParams() {}

CastCodecSpecificParams::~CastCodecSpecificParams() {}

CastRtpPayloadParams::CastRtpPayloadParams()
    : payload_type(0),
      max_latency_ms(0),
      min_latency_ms(0),
      ssrc(0),
      feedback_ssrc(0),
      clock_rate(0),
      max_bitrate(0),
      min_bitrate(0),
      channels(0),
      max_frame_rate(0.0),
      width(0),
      height(0) {}

CastRtpPayloadParams::~CastRtpPayloadParams() {}

CastRtpParams::CastRtpParams() {}

CastRtpParams::~CastRtpParams() {}

CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
                             const scoped_refptr<CastSession>& session)
    : track_(track), cast_session_(session), weak_factory_(this) {}

CastRtpStream::~CastRtpStream() {}

std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
  if (IsAudio())
    return SupportedAudioParams();
  else
    return SupportedVideoParams();
}

CastRtpParams CastRtpStream::GetParams() { return params_; }

void CastRtpStream::Start(const CastRtpParams& params,
                          const base::Closure& start_callback,
                          const base::Closure& stop_callback,
                          const ErrorCallback& error_callback) {
  VLOG(1) << "CastRtpStream::Start = " << (IsAudio() ? "audio" : "video");
  stop_callback_ = stop_callback;
  error_callback_ = error_callback;

  if (IsAudio()) {
    AudioSenderConfig config;
    if (!ToAudioSenderConfig(params, &config)) {
      DidEncounterError("Invalid parameters for audio.");
      return;
    }

    // In case of error we have to go through DidEncounterError() to stop
    // the streaming after reporting the error.
    audio_sink_.reset(new CastAudioSink(
        track_,
        media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
                                            weak_factory_.GetWeakPtr())),
        params.payload.channels,
        params.payload.clock_rate));
    cast_session_->StartAudio(
        config,
        base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()),
        base::Bind(&CastRtpStream::DidEncounterError,
                   weak_factory_.GetWeakPtr()));
    start_callback.Run();
  } else {
    VideoSenderConfig config;
    if (!ToVideoSenderConfig(params, &config)) {
      DidEncounterError("Invalid parameters for video.");
      return;
    }
    // See the audio code above for an explanation of the callbacks.
    video_sink_.reset(new CastVideoSink(
        track_,
        gfx::Size(config.width, config.height),
        media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
                                            weak_factory_.GetWeakPtr()))));
    cast_session_->StartVideo(
        config,
        base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()),
        base::Bind(&CastRtpStream::DidEncounterError,
                   weak_factory_.GetWeakPtr()));
    start_callback.Run();
  }
}

void CastRtpStream::Stop() {
  VLOG(1) << "CastRtpStream::Stop = " << (IsAudio() ? "audio" : "video");
  audio_sink_.reset();
  video_sink_.reset();
  if (!stop_callback_.is_null())
    stop_callback_.Run();
}

void CastRtpStream::ToggleLogging(bool enable) {
  VLOG(1) << "CastRtpStream::ToggleLogging(" << enable << ") = "
          << (IsAudio() ? "audio" : "video");
  cast_session_->ToggleLogging(IsAudio(), enable);
}

void CastRtpStream::GetRawEvents(
    const base::Callback<void(scoped_ptr<base::BinaryValue>)>& callback,
    const std::string& extra_data) {
  VLOG(1) << "CastRtpStream::GetRawEvents = "
          << (IsAudio() ? "audio" : "video");
  cast_session_->GetEventLogsAndReset(IsAudio(), extra_data, callback);
}

void CastRtpStream::GetStats(
    const base::Callback<void(scoped_ptr<base::DictionaryValue>)>& callback) {
  VLOG(1) << "CastRtpStream::GetStats = "
          << (IsAudio() ? "audio" : "video");
  cast_session_->GetStatsAndReset(IsAudio(), callback);
}

bool CastRtpStream::IsAudio() const {
  return track_.source().type() == blink::WebMediaStreamSource::TypeAudio;
}

void CastRtpStream::DidEncounterError(const std::string& message) {
  VLOG(1) << "CastRtpStream::DidEncounterError(" << message << ") = "
          << (IsAudio() ? "audio" : "video");
  // Save the WeakPtr first because the error callback might delete this object.
  base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr();
  error_callback_.Run(message);
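  // Tear down the stream asynchronously on the render thread's message loop.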
  content::RenderThread::Get()->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&CastRtpStream::Stop, ptr));
}
    606