// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

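// Standalone Cast test receiver.  It prompts on stdin for network endpoints
// and stream parameters, then receives a Cast audio/video stream and plays it
// back: video in a LinuxOutputWindow (Linux only) and audio through Chromium's
// audio stack.
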
#include <algorithm>
#include <climits>
#include <cstdarg>
#include <cstdio>
#include <deque>
#include <map>
#include <string>
#include <utility>

#include "base/at_exit.h"
#include "base/command_line.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop/message_loop.h"
#include "base/synchronization/lock.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread.h"
#include "base/time/default_tick_clock.h"
#include "base/timer/timer.h"
#include "media/audio/audio_io.h"
#include "media/audio/audio_manager.h"
#include "media/audio/audio_parameters.h"
#include "media/audio/fake_audio_log_factory.h"
#include "media/base/audio_bus.h"
#include "media/base/channel_layout.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/cast_receiver.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/barcode.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/input_builder.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "media/cast/transport/transport/udp_transport.h"
#include "net/base/net_util.h"

#if defined(OS_LINUX)
#include "media/cast/test/linux_output_window.h"
#endif  // OS_LINUX

namespace media {
namespace cast {

// Settings chosen to match default sender settings.
#define DEFAULT_SEND_PORT "0"
#define DEFAULT_RECEIVE_PORT "2344"
#define DEFAULT_SEND_IP "0.0.0.0"
#define DEFAULT_AUDIO_FEEDBACK_SSRC "2"
#define DEFAULT_AUDIO_INCOMING_SSRC "1"
#define DEFAULT_AUDIO_PAYLOAD_TYPE "127"
#define DEFAULT_VIDEO_FEEDBACK_SSRC "12"
#define DEFAULT_VIDEO_INCOMING_SSRC "11"
#define DEFAULT_VIDEO_PAYLOAD_TYPE "96"

#if defined(OS_LINUX)
const char* kVideoWindowWidth = "1280";
const char* kVideoWindowHeight = "720";
#endif  // OS_LINUX

void GetPorts(int* tx_port, int* rx_port) {
  test::InputBuilder tx_input(
      "Enter send port.", DEFAULT_SEND_PORT, 1, INT_MAX);
  *tx_port = tx_input.GetIntInput();

  test::InputBuilder rx_input(
      "Enter receive port.", DEFAULT_RECEIVE_PORT, 1, INT_MAX);
  *rx_port = rx_input.GetIntInput();
}

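// Prompts for an IP address.  Accepts the default or any string containing
// exactly three '.' separators; full validation is deferred to
// net::ParseIPLiteralToNumber() in main().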
std::string GetIpAddress(const std::string& display_text) {
  test::InputBuilder input(display_text, DEFAULT_SEND_IP, INT_MIN, INT_MAX);
  std::string ip_address = input.GetStringInput();
  // Ensure the IP address is either the default value or in dotted-quad form.
  while (ip_address != DEFAULT_SEND_IP &&
         std::count(ip_address.begin(), ip_address.end(), '.') != 3) {
    ip_address = input.GetStringInput();
  }
  return ip_address;
}

void GetAudioSsrcs(FrameReceiverConfig* audio_config) {
  test::InputBuilder input_tx(
      "Choose audio sender SSRC.", DEFAULT_AUDIO_FEEDBACK_SSRC, 1, INT_MAX);
  audio_config->feedback_ssrc = input_tx.GetIntInput();

  test::InputBuilder input_rx(
      "Choose audio receiver SSRC.", DEFAULT_AUDIO_INCOMING_SSRC, 1, INT_MAX);
  audio_config->incoming_ssrc = input_rx.GetIntInput();
}

void GetVideoSsrcs(FrameReceiverConfig* video_config) {
  test::InputBuilder input_tx(
      "Choose video sender SSRC.", DEFAULT_VIDEO_FEEDBACK_SSRC, 1, INT_MAX);
  video_config->feedback_ssrc = input_tx.GetIntInput();

  test::InputBuilder input_rx(
      "Choose video receiver SSRC.", DEFAULT_VIDEO_INCOMING_SSRC, 1, INT_MAX);
  video_config->incoming_ssrc = input_rx.GetIntInput();
}

#if defined(OS_LINUX)
void GetWindowSize(int* width, int* height) {
  // Resolution values based on sender settings.
  test::InputBuilder input_w(
      "Choose window width.", kVideoWindowWidth, 144, 1920);
  *width = input_w.GetIntInput();

  test::InputBuilder input_h(
      "Choose window height.", kVideoWindowHeight, 176, 1080);
  *height = input_h.GetIntInput();
}
#endif  // OS_LINUX

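// The prompts below constrain payload types to 96-127, the RTP dynamic
// payload type range.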
void GetAudioPayloadtype(FrameReceiverConfig* audio_config) {
  test::InputBuilder input("Choose audio receiver payload type.",
                           DEFAULT_AUDIO_PAYLOAD_TYPE,
                           96,
                           127);
  audio_config->rtp_payload_type = input.GetIntInput();
}

FrameReceiverConfig GetAudioReceiverConfig() {
  FrameReceiverConfig audio_config = GetDefaultAudioReceiverConfig();
  GetAudioSsrcs(&audio_config);
  GetAudioPayloadtype(&audio_config);
  audio_config.rtp_max_delay_ms = 300;
  return audio_config;
}

void GetVideoPayloadtype(FrameReceiverConfig* video_config) {
  test::InputBuilder input("Choose video receiver payload type.",
                           DEFAULT_VIDEO_PAYLOAD_TYPE,
                           96,
                           127);
  video_config->rtp_payload_type = input.GetIntInput();
}

FrameReceiverConfig GetVideoReceiverConfig() {
  FrameReceiverConfig video_config = GetDefaultVideoReceiverConfig();
  GetVideoSsrcs(&video_config);
  GetVideoPayloadtype(&video_config);
  video_config.rtp_max_delay_ms = 300;
  return video_config;
}

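// Maps a Cast FrameReceiverConfig onto Chromium AudioParameters: low-latency
// PCM, a channel layout guessed from the channel count, 32 bits per sample,
// and 10 ms buffers (frequency / 100 frames).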
AudioParameters ToAudioParameters(const FrameReceiverConfig& config) {
  const int samples_in_10ms = config.frequency / 100;
  return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
                         GuessChannelLayout(config.channels),
                         config.frequency, 32, samples_in_10ms);
}

// An InProcessReceiver that renders video frames to a LinuxOutputWindow and
// audio frames via Chromium's audio stack.
//
// InProcessReceiver pushes audio and video frames to this subclass, and these
// frames are pushed into a queue.  Then, for audio, the Chromium audio stack
// will make polling calls on a separate, unknown thread whereby audio frames
// are pulled out of the audio queue as needed.  For video, however, NaivePlayer
// is responsible for scheduling updates to the screen itself.  For both, the
// queues are pruned (i.e., received frames are skipped) when the system is not
// able to play back as fast as frames are entering the queue.
//
// This is NOT a good reference implementation for a Cast receiver player since:
// 1. It only skips frames to handle slower-than-expected playout, or halts
//    playback to handle frame underruns.
// 2. It makes no attempt to synchronize the timing of playout of the video
//    frames with the audio frames.
// 3. It does nothing to smooth or hide discontinuities in playback due to
//    timing issues or missing frames.
class NaivePlayer : public InProcessReceiver,
                    public AudioOutputStream::AudioSourceCallback {
 public:
  NaivePlayer(const scoped_refptr<CastEnvironment>& cast_environment,
              const net::IPEndPoint& local_end_point,
              const net::IPEndPoint& remote_end_point,
              const FrameReceiverConfig& audio_config,
              const FrameReceiverConfig& video_config,
              int window_width,
              int window_height)
      : InProcessReceiver(cast_environment,
                          local_end_point,
                          remote_end_point,
                          audio_config,
                          video_config),
        // Maximum age is the duration of 3 video frames.  3 was chosen
        // arbitrarily, but seems to work well.
        max_frame_age_(base::TimeDelta::FromSeconds(1) * 3 /
                           video_config.max_frame_rate),
#if defined(OS_LINUX)
        render_(0, 0, window_width, window_height, "Cast_receiver"),
#endif  // OS_LINUX
        num_video_frames_processed_(0),
        num_audio_frames_processed_(0),
        currently_playing_audio_frame_start_(-1),
        // -2 matches the "bad frame" sentinel used in OnAudioFrame(), so the
        // first received frame is never mistaken for a continuation.
        last_audio_frame_no_(-2) {}

  virtual ~NaivePlayer() {}

  virtual void Start() OVERRIDE {
    AudioManager::Get()->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&NaivePlayer::StartAudioOutputOnAudioManagerThread,
                   base::Unretained(this)));
    // Note: No need to wait for audio polling to start since the push-and-pull
    // mechanism is synchronized via the |audio_playout_queue_|.
    InProcessReceiver::Start();
  }

  virtual void Stop() OVERRIDE {
    // First, stop audio output to the Chromium audio stack.
    base::WaitableEvent done(false, false);
    DCHECK(!AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
    AudioManager::Get()->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&NaivePlayer::StopAudioOutputOnAudioManagerThread,
                   base::Unretained(this),
                   &done));
    done.Wait();

    // Now, stop receiving new frames.
    InProcessReceiver::Stop();

    // Finally, clear out any frames remaining in the queues.
    while (!audio_playout_queue_.empty()) {
      const scoped_ptr<AudioBus> to_be_deleted(
          audio_playout_queue_.front().second);
      audio_playout_queue_.pop_front();
    }
    video_playout_queue_.clear();
  }

 private:
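  // Opens an output stream via the AudioManager and starts it, so that the
  // audio stack begins pulling data through OnMoreData().  Must run on the
  // AudioManager's task runner.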
  void StartAudioOutputOnAudioManagerThread() {
    DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
    DCHECK(!audio_output_stream_);
    audio_output_stream_.reset(AudioManager::Get()->MakeAudioOutputStreamProxy(
        ToAudioParameters(audio_config()), ""));
    if (audio_output_stream_.get() && audio_output_stream_->Open()) {
      audio_output_stream_->Start(this);
    } else {
      LOG(ERROR) << "Failed to open an audio output stream.  "
                 << "Audio playback disabled.";
      audio_output_stream_.reset();
    }
  }

  void StopAudioOutputOnAudioManagerThread(base::WaitableEvent* done) {
    DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
    if (audio_output_stream_.get()) {
      audio_output_stream_->Stop();
      audio_output_stream_->Close();
      audio_output_stream_.reset();
    }
    done->Signal();
  }

  ////////////////////////////////////////////////////////////////////
  // InProcessReceiver overrides.

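  // Enqueues a decoded video frame for playout and schedules the playout
  // timer.  Each test frame carries a barcode with its frame number; when the
  // barcode decodes, the frame's playout time is recorded for the A/V sync
  // estimate in CheckAVSync().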
  virtual void OnVideoFrame(const scoped_refptr<VideoFrame>& video_frame,
                            const base::TimeTicks& playout_time,
                            bool is_continuous) OVERRIDE {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
    LOG_IF(WARNING, !is_continuous)
        << "Video: Discontinuity in received frames.";
    video_playout_queue_.push_back(std::make_pair(playout_time, video_frame));
    ScheduleVideoPlayout();
    uint16 frame_no;
    if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) {
      video_play_times_.insert(
          std::pair<uint16, base::TimeTicks>(frame_no, playout_time));
    } else {
      VLOG(2) << "Barcode decode failed!";
    }
  }

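  // Enqueues a decoded audio frame under |audio_lock_| for consumption by
  // OnMoreData() on the audio thread.  Test audio carries an encoded frame
  // number in channel 0, which is used for the A/V sync estimate.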
  virtual void OnAudioFrame(scoped_ptr<AudioBus> audio_frame,
                            const base::TimeTicks& playout_time,
                            bool is_continuous) OVERRIDE {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
    LOG_IF(WARNING, !is_continuous)
        << "Audio: Discontinuity in received frames.";
    base::AutoLock auto_lock(audio_lock_);
    uint16 frame_no;
    if (media::cast::DecodeTimestamp(audio_frame->channel(0),
                                     audio_frame->frames(),
                                     &frame_no)) {
      // Since there are lots of audio packets with the same frame_no, we
      // really want to make sure that we get the playout_time from the first
      // one.  If is_continuous is false, then it's possible that we already
      // missed the first one.
      if (is_continuous && frame_no == last_audio_frame_no_ + 1) {
        audio_play_times_.insert(
            std::pair<uint16, base::TimeTicks>(frame_no, playout_time));
      }
      last_audio_frame_no_ = frame_no;
    } else {
      VLOG(2) << "Audio decode failed!";
      last_audio_frame_no_ = -2;
    }
    audio_playout_queue_.push_back(
        std::make_pair(playout_time, audio_frame.release()));
  }

  // End of InProcessReceiver overrides.
  ////////////////////////////////////////////////////////////////////

  ////////////////////////////////////////////////////////////////////
  // AudioSourceCallback implementation.

  virtual int OnMoreData(AudioBus* dest, AudioBuffersState buffers_state)
      OVERRIDE {
    // Note: This method is being invoked by a separate thread unknown to us
    // (i.e., outside of CastEnvironment).

    int samples_remaining = dest->frames();

    while (samples_remaining > 0) {
      // Get the next audio frame ready for playout.
      if (!currently_playing_audio_frame_.get()) {
        base::AutoLock auto_lock(audio_lock_);

        // Prune the queue, skipping entries that are too old.
        // TODO(miu): Use |buffers_state| to account for audio buffering delays
        // upstream.
        const base::TimeTicks earliest_time_to_play =
            cast_env()->Clock()->NowTicks() - max_frame_age_;
        while (!audio_playout_queue_.empty() &&
               audio_playout_queue_.front().first < earliest_time_to_play) {
          PopOneAudioFrame(true);
        }
        if (audio_playout_queue_.empty())
          break;

        currently_playing_audio_frame_ = PopOneAudioFrame(false).Pass();
        currently_playing_audio_frame_start_ = 0;
      }

      // Copy some or all of the samples in |currently_playing_audio_frame_| to
      // |dest|.  Once all samples in |currently_playing_audio_frame_| have been
      // consumed, release it.
      const int num_samples_to_copy =
          std::min(samples_remaining,
                   currently_playing_audio_frame_->frames() -
                       currently_playing_audio_frame_start_);
      currently_playing_audio_frame_->CopyPartialFramesTo(
          currently_playing_audio_frame_start_,
          num_samples_to_copy,
          0,
          dest);
      samples_remaining -= num_samples_to_copy;
      currently_playing_audio_frame_start_ += num_samples_to_copy;
      if (currently_playing_audio_frame_start_ ==
              currently_playing_audio_frame_->frames()) {
        currently_playing_audio_frame_.reset();
      }
    }

    // If |dest| has not been fully filled, then an underrun has occurred;
    // fill the remainder of |dest| with zeros.
    if (samples_remaining > 0) {
      // Note: Only log underruns after the first frame has been received.
      LOG_IF(WARNING, currently_playing_audio_frame_start_ != -1)
          << "Audio: Playback underrun of " << samples_remaining << " samples!";
      dest->ZeroFramesPartial(dest->frames() - samples_remaining,
                              samples_remaining);
    }

    return dest->frames();
  }

  virtual void OnError(AudioOutputStream* stream) OVERRIDE {
    LOG(ERROR) << "AudioOutputStream reports an error.  "
               << "Playback is unlikely to continue.";
  }

  // End of AudioSourceCallback implementation.
  ////////////////////////////////////////////////////////////////////

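  // Discards queued video frames older than |max_frame_age_|, then arms
  // |video_playout_timer_| to fire when the oldest remaining frame is due (or
  // stops the timer if the queue is empty).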
  void ScheduleVideoPlayout() {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));

    // Prune the queue, skipping entries that are too old.
    const base::TimeTicks now = cast_env()->Clock()->NowTicks();
    const base::TimeTicks earliest_time_to_play = now - max_frame_age_;
    while (!video_playout_queue_.empty() &&
           video_playout_queue_.front().first < earliest_time_to_play) {
      PopOneVideoFrame(true);
    }

    // If the queue is not empty, schedule playout of its first frame.
    if (video_playout_queue_.empty()) {
      video_playout_timer_.Stop();
    } else {
      video_playout_timer_.Start(
          FROM_HERE,
          video_playout_queue_.front().first - now,
          base::Bind(&NaivePlayer::PlayNextVideoFrame,
                     base::Unretained(this)));
    }
  }

  void PlayNextVideoFrame() {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
    if (!video_playout_queue_.empty()) {
      const scoped_refptr<VideoFrame> video_frame = PopOneVideoFrame(false);
#if defined(OS_LINUX)
      render_.RenderFrame(video_frame);
#endif  // OS_LINUX
    }
    ScheduleVideoPlayout();
    CheckAVSync();
  }

  scoped_refptr<VideoFrame> PopOneVideoFrame(bool is_being_skipped) {
    DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));

    if (is_being_skipped) {
      VLOG(1) << "VideoFrame[" << num_video_frames_processed_
              << " (dt=" << (video_playout_queue_.front().first -
                             last_popped_video_playout_time_).InMicroseconds()
              << " usec)]: Skipped.";
    } else {
      VLOG(1) << "VideoFrame[" << num_video_frames_processed_
              << " (dt=" << (video_playout_queue_.front().first -
                             last_popped_video_playout_time_).InMicroseconds()
              << " usec)]: Playing "
              << (cast_env()->Clock()->NowTicks() -
                      video_playout_queue_.front().first).InMicroseconds()
              << " usec later than intended.";
    }

    last_popped_video_playout_time_ = video_playout_queue_.front().first;
    const scoped_refptr<VideoFrame> ret = video_playout_queue_.front().second;
    video_playout_queue_.pop_front();
    ++num_video_frames_processed_;
    return ret;
  }

  scoped_ptr<AudioBus> PopOneAudioFrame(bool was_skipped) {
    audio_lock_.AssertAcquired();

    if (was_skipped) {
      VLOG(1) << "AudioFrame[" << num_audio_frames_processed_
              << " (dt=" << (audio_playout_queue_.front().first -
                             last_popped_audio_playout_time_).InMicroseconds()
              << " usec)]: Skipped.";
    } else {
      VLOG(1) << "AudioFrame[" << num_audio_frames_processed_
              << " (dt=" << (audio_playout_queue_.front().first -
                             last_popped_audio_playout_time_).InMicroseconds()
              << " usec)]: Playing "
              << (cast_env()->Clock()->NowTicks() -
                      audio_playout_queue_.front().first).InMicroseconds()
              << " usec later than intended.";
    }

    last_popped_audio_playout_time_ = audio_playout_queue_.front().first;
    scoped_ptr<AudioBus> ret(audio_playout_queue_.front().second);
    audio_playout_queue_.pop_front();
    ++num_audio_frames_processed_;
    return ret.Pass();
  }

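  // Estimates the audio-vs-video playout offset by matching frame numbers
  // decoded from audio timestamps and video barcodes.  Once more than 30
  // matching frames have been seen, the average offset is logged and the
  // bookkeeping maps are reset.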
  void CheckAVSync() {
    if (video_play_times_.size() > 30 &&
        audio_play_times_.size() > 30) {
      size_t num_events = 0;
      base::TimeDelta delta;
      std::map<uint16, base::TimeTicks>::iterator audio_iter, video_iter;
      for (video_iter = video_play_times_.begin();
           video_iter != video_play_times_.end();
           ++video_iter) {
        audio_iter = audio_play_times_.find(video_iter->first);
        if (audio_iter != audio_play_times_.end()) {
          num_events++;
          // Positive values mean audio is running behind video.
          delta += audio_iter->second - video_iter->second;
        }
      }

      if (num_events > 30) {
        VLOG(0) << "Audio behind by: "
                << (delta / num_events).InMilliseconds()
                << "ms";
        video_play_times_.clear();
        audio_play_times_.clear();
      }
    } else if (video_play_times_.size() + audio_play_times_.size() > 500) {
      // We are decoding audio or video timestamps, but not both; clear them
      // out so the maps do not grow without bound.
      video_play_times_.clear();
      audio_play_times_.clear();
    }
  }

  // Frames in the queue older than this (relative to NowTicks()) will be
  // dropped (i.e., playback is falling behind).
  const base::TimeDelta max_frame_age_;

  // Outputs created, started, and destroyed by this NaivePlayer.
#if defined(OS_LINUX)
  test::LinuxOutputWindow render_;
#endif  // OS_LINUX
  scoped_ptr<AudioOutputStream> audio_output_stream_;

  // Video playout queue.
  typedef std::pair<base::TimeTicks, scoped_refptr<VideoFrame> >
      VideoQueueEntry;
  std::deque<VideoQueueEntry> video_playout_queue_;
  base::TimeTicks last_popped_video_playout_time_;
  int64 num_video_frames_processed_;

  base::OneShotTimer<NaivePlayer> video_playout_timer_;

  // Audio playout queue, synchronized by |audio_lock_|.
  base::Lock audio_lock_;
  typedef std::pair<base::TimeTicks, AudioBus*> AudioQueueEntry;
  std::deque<AudioQueueEntry> audio_playout_queue_;
  base::TimeTicks last_popped_audio_playout_time_;
  int64 num_audio_frames_processed_;

  // These must only be used on the audio thread calling OnMoreData().
  scoped_ptr<AudioBus> currently_playing_audio_frame_;
  int currently_playing_audio_frame_start_;

  std::map<uint16, base::TimeTicks> audio_play_times_;
  std::map<uint16, base::TimeTicks> video_play_times_;
  int32 last_audio_frame_no_;
};

}  // namespace cast
}  // namespace media

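// Entry point: prompts on stdin for ports, IP addresses, SSRCs, and payload
// types, then starts a NaivePlayer and runs it until the process is killed.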
int main(int argc, char** argv) {
  base::AtExitManager at_exit;
  CommandLine::Init(argc, argv);
  InitLogging(logging::LoggingSettings());

  scoped_refptr<media::cast::CastEnvironment> cast_environment(
      new media::cast::StandaloneCastEnvironment);

  // Start up the Chromium audio system.
  media::FakeAudioLogFactory fake_audio_log_factory;
  const scoped_ptr<media::AudioManager> audio_manager(
      media::AudioManager::Create(&fake_audio_log_factory));
  CHECK(media::AudioManager::Get());

  media::cast::FrameReceiverConfig audio_config =
      media::cast::GetAudioReceiverConfig();
  media::cast::FrameReceiverConfig video_config =
      media::cast::GetVideoReceiverConfig();

  // Determine local and remote endpoints.
  int remote_port, local_port;
  media::cast::GetPorts(&remote_port, &local_port);
  if (!local_port) {
    LOG(ERROR) << "Invalid local port.";
    return 1;
  }
  std::string remote_ip_address = media::cast::GetIpAddress("Enter remote IP.");
  std::string local_ip_address = media::cast::GetIpAddress("Enter local IP.");
  net::IPAddressNumber remote_ip_number;
  net::IPAddressNumber local_ip_number;
  if (!net::ParseIPLiteralToNumber(remote_ip_address, &remote_ip_number)) {
    LOG(ERROR) << "Invalid remote IP address.";
    return 1;
  }
  if (!net::ParseIPLiteralToNumber(local_ip_address, &local_ip_number)) {
    LOG(ERROR) << "Invalid local IP address.";
    return 1;
  }
  net::IPEndPoint remote_end_point(remote_ip_number, remote_port);
  net::IPEndPoint local_end_point(local_ip_number, local_port);

  // Create and start the player.
  int window_width = 0;
  int window_height = 0;
#if defined(OS_LINUX)
  media::cast::GetWindowSize(&window_width, &window_height);
#endif  // OS_LINUX
  media::cast::NaivePlayer player(cast_environment,
                                  local_end_point,
                                  remote_end_point,
                                  audio_config,
                                  video_config,
                                  window_width,
                                  window_height);
  player.Start();

  base::MessageLoop().Run();  // Run forever (i.e., until SIGTERM).
  NOTREACHED();
  return 0;
}