// Home | History | Annotate | Download | only in media
      1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "content/renderer/media/media_stream_dependency_factory.h"
      6 
      7 #include <vector>
      8 
      9 #include "base/command_line.h"
     10 #include "base/strings/utf_string_conversions.h"
     11 #include "base/synchronization/waitable_event.h"
     12 #include "content/public/common/content_switches.h"
     13 #include "content/renderer/media/media_stream_source_extra_data.h"
     14 #include "content/renderer/media/media_stream_track_extra_data.h"
     15 #include "content/renderer/media/media_stream_video_track.h"
     16 #include "content/renderer/media/peer_connection_identity_service.h"
     17 #include "content/renderer/media/rtc_media_constraints.h"
     18 #include "content/renderer/media/rtc_peer_connection_handler.h"
     19 #include "content/renderer/media/rtc_video_capturer.h"
     20 #include "content/renderer/media/rtc_video_decoder_factory.h"
     21 #include "content/renderer/media/rtc_video_encoder_factory.h"
     22 #include "content/renderer/media/video_capture_impl_manager.h"
     23 #include "content/renderer/media/webaudio_capturer_source.h"
     24 #include "content/renderer/media/webrtc_audio_device_impl.h"
     25 #include "content/renderer/media/webrtc_local_audio_track.h"
     26 #include "content/renderer/media/webrtc_uma_histograms.h"
     27 #include "content/renderer/p2p/ipc_network_manager.h"
     28 #include "content/renderer/p2p/ipc_socket_factory.h"
     29 #include "content/renderer/p2p/port_allocator.h"
     30 #include "content/renderer/render_thread_impl.h"
     31 #include "jingle/glue/thread_wrapper.h"
     32 #include "media/filters/gpu_video_accelerator_factories.h"
     33 #include "third_party/WebKit/public/platform/WebMediaConstraints.h"
     34 #include "third_party/WebKit/public/platform/WebMediaStream.h"
     35 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
     36 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
     37 #include "third_party/WebKit/public/platform/WebURL.h"
     38 #include "third_party/WebKit/public/web/WebDocument.h"
     39 #include "third_party/WebKit/public/web/WebFrame.h"
     40 #include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h"
     41 
     42 #if defined(USE_OPENSSL)
     43 #include "third_party/libjingle/source/talk/base/ssladapter.h"
     44 #else
     45 #include "net/socket/nss_ssl_util.h"
     46 #endif
     47 
     48 #if defined(GOOGLE_TV)
     49 #include "content/renderer/media/rtc_video_decoder_factory_tv.h"
     50 #endif
     51 
     52 #if defined(OS_ANDROID)
     53 #include "media/base/android/media_codec_bridge.h"
     54 #endif
     55 
     56 namespace content {
     57 
// Constraint keys which enable default audio processing on media streams
// with audio. Each entry is merged into the caller-supplied constraints by
// ApplyFixedAudioConstraints() below, unless the caller already specified
// the key (in which case the caller's value wins).
struct {
  const char* key;
  const char* value;
} const kDefaultAudioConstraints[] = {
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    webrtc::MediaConstraintsInterface::kValueTrue },
#if defined(OS_CHROMEOS) || defined(OS_MACOSX)
  // Enable the extended filter mode AEC on platforms with known echo issues.
  { webrtc::MediaConstraintsInterface::kExperimentalEchoCancellation,
    webrtc::MediaConstraintsInterface::kValueTrue },
#endif
  { webrtc::MediaConstraintsInterface::kAutoGainControl,
    webrtc::MediaConstraintsInterface::kValueTrue },
  { webrtc::MediaConstraintsInterface::kExperimentalAutoGainControl,
    webrtc::MediaConstraintsInterface::kValueTrue },
  { webrtc::MediaConstraintsInterface::kNoiseSuppression,
    webrtc::MediaConstraintsInterface::kValueTrue },
  { webrtc::MediaConstraintsInterface::kHighpassFilter,
    webrtc::MediaConstraintsInterface::kValueTrue },
};
     80 
// Map of corresponding media constraints and platform effects. Used in
// CreateNativeMediaSources() to reconcile hardware (platform) audio effects
// with the software constraint of the same meaning.
struct {
  const char* constraint;
  const media::AudioParameters::PlatformEffectsMask effect;
} const kConstraintEffectMap[] = {
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    media::AudioParameters::ECHO_CANCELLER},
};
     89 
     90 // Merge |constraints| with |kDefaultAudioConstraints|. For any key which exists
     91 // in both, the value from |constraints| is maintained, including its
     92 // mandatory/optional status. New values from |kDefaultAudioConstraints| will
     93 // be added with mandatory status.
     94 void ApplyFixedAudioConstraints(RTCMediaConstraints* constraints) {
     95   for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kDefaultAudioConstraints); ++i) {
     96     bool already_set_value;
     97     if (!webrtc::FindConstraint(constraints, kDefaultAudioConstraints[i].key,
     98                                 &already_set_value, NULL)) {
     99       constraints->AddMandatory(kDefaultAudioConstraints[i].key,
    100           kDefaultAudioConstraints[i].value, false);
    101     } else {
    102       DVLOG(1) << "Constraint " << kDefaultAudioConstraints[i].key
    103                << " already set to " << already_set_value;
    104     }
    105   }
    106 }
    107 
    108 class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
    109  public:
    110   P2PPortAllocatorFactory(
    111       P2PSocketDispatcher* socket_dispatcher,
    112       talk_base::NetworkManager* network_manager,
    113       talk_base::PacketSocketFactory* socket_factory,
    114       blink::WebFrame* web_frame)
    115       : socket_dispatcher_(socket_dispatcher),
    116         network_manager_(network_manager),
    117         socket_factory_(socket_factory),
    118         web_frame_(web_frame) {
    119   }
    120 
    121   virtual cricket::PortAllocator* CreatePortAllocator(
    122       const std::vector<StunConfiguration>& stun_servers,
    123       const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE {
    124     CHECK(web_frame_);
    125     P2PPortAllocator::Config config;
    126     if (stun_servers.size() > 0) {
    127       config.stun_server = stun_servers[0].server.hostname();
    128       config.stun_server_port = stun_servers[0].server.port();
    129     }
    130     config.legacy_relay = false;
    131     for (size_t i = 0; i < turn_configurations.size(); ++i) {
    132       P2PPortAllocator::Config::RelayServerConfig relay_config;
    133       relay_config.server_address = turn_configurations[i].server.hostname();
    134       relay_config.port = turn_configurations[i].server.port();
    135       relay_config.username = turn_configurations[i].username;
    136       relay_config.password = turn_configurations[i].password;
    137       relay_config.transport_type = turn_configurations[i].transport_type;
    138       relay_config.secure = turn_configurations[i].secure;
    139       config.relays.push_back(relay_config);
    140     }
    141 
    142     // Use first turn server as the stun server.
    143     if (turn_configurations.size() > 0) {
    144       config.stun_server = config.relays[0].server_address;
    145       config.stun_server_port = config.relays[0].port;
    146     }
    147 
    148     return new P2PPortAllocator(
    149         web_frame_, socket_dispatcher_.get(), network_manager_,
    150         socket_factory_, config);
    151   }
    152 
    153  protected:
    154   virtual ~P2PPortAllocatorFactory() {}
    155 
    156  private:
    157   scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
    158   // |network_manager_| and |socket_factory_| are a weak references, owned by
    159   // MediaStreamDependencyFactory.
    160   talk_base::NetworkManager* network_manager_;
    161   talk_base::PacketSocketFactory* socket_factory_;
    162   // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
    163   blink::WebFrame* web_frame_;
    164 };
    165 
// SourceStateObserver is a helper class used for observing the startup state
// transition of webrtc media sources such as a camera or microphone.
// An instance of the object deletes itself after use.
// Usage:
// 1. Create an instance of the object with the blink::WebMediaStream
//    the observed sources belong to, and a callback.
// 2. Add the sources to the observer using AddSource.
// 3. Call StartObservering().
// 4. The callback will be triggered when all sources have transitioned from
//    webrtc::MediaSourceInterface::kInitializing.
class SourceStateObserver : public webrtc::ObserverInterface,
                            public base::NonThreadSafe {
 public:
  SourceStateObserver(
      blink::WebMediaStream* web_stream,
      const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
     : web_stream_(web_stream),
       ready_callback_(callback),
       live_(true) {
  }

  // Registers |source| for observation. Sources already in the kLive state
  // need no waiting; a source already in kEnded marks the whole stream as
  // not live.
  void AddSource(webrtc::MediaSourceInterface* source) {
    DCHECK(CalledOnValidThread());
    switch (source->state()) {
      case webrtc::MediaSourceInterface::kInitializing:
        sources_.push_back(source);
        source->RegisterObserver(this);
        break;
      case webrtc::MediaSourceInterface::kLive:
        // The source is already live so we don't need to wait for it.
        break;
      case webrtc::MediaSourceInterface::kEnded:
        // The source have already failed.
        live_ = false;
        break;
      default:
        NOTREACHED();
    }
  }

  // Starts waiting for the added sources. Note: may run the callback and
  // delete |this| synchronously when no source is still initializing.
  // (The misspelling "Observering" is part of the interface; callers use
  // this exact name.)
  void StartObservering() {
    DCHECK(CalledOnValidThread());
    CheckIfSourcesAreLive();
  }

  // webrtc::ObserverInterface implementation; invoked when an observed
  // source changes state.
  virtual void OnChanged() OVERRIDE {
    DCHECK(CalledOnValidThread());
    CheckIfSourcesAreLive();
  }

 private:
  // Drops every source that has left kInitializing, folding its liveness
  // into |live_| (true only if all finished sources reached kLive). When no
  // sources remain, runs |ready_callback_| and deletes this object — no
  // member may be touched after that point.
  void CheckIfSourcesAreLive() {
    ObservedSources::iterator it = sources_.begin();
    while (it != sources_.end()) {
      if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
        live_ &=  (*it)->state() == webrtc::MediaSourceInterface::kLive;
        (*it)->UnregisterObserver(this);
        it = sources_.erase(it);
      } else {
        ++it;
      }
    }
    if (sources_.empty()) {
      ready_callback_.Run(web_stream_, live_);
      delete this;
    }
  }

  blink::WebMediaStream* web_stream_;
  MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
  bool live_;
  typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
      ObservedSources;
  ObservedSources sources_;
};
    241 
// All raw-pointer members start out NULL; the libjingle threads and the
// peer connection factory are set up later (see
// CreatePeerConnectionFactory()).
MediaStreamDependencyFactory::MediaStreamDependencyFactory(
    VideoCaptureImplManager* vc_manager,
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
#if defined(GOOGLE_TV)
      decoder_factory_tv_(NULL),
#endif
      vc_manager_(vc_manager),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}
    255 
MediaStreamDependencyFactory::~MediaStreamDependencyFactory() {
  // Tears down the peer connection factory and its associated resources.
  CleanupPeerConnectionFactory();
}
    259 
    260 blink::WebRTCPeerConnectionHandler*
    261 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler(
    262     blink::WebRTCPeerConnectionHandlerClient* client) {
    263   // Save histogram data so we can see how much PeerConnetion is used.
    264   // The histogram counts the number of calls to the JS API
    265   // webKitRTCPeerConnection.
    266   UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
    267 
    268   if (!EnsurePeerConnectionFactory())
    269     return NULL;
    270 
    271   return new RTCPeerConnectionHandler(client, this);
    272 }
    273 
// Creates the native (libjingle) sources backing every audio and video
// track in |web_stream|, and runs |sources_created| once all of them have
// left the kInitializing state (via SourceStateObserver). On any failure
// |sources_created| is run immediately with success == false.
void MediaStreamDependencyFactory::CreateNativeMediaSources(
    int render_view_id,
    const blink::WebMediaConstraints& audio_constraints,
    const blink::WebMediaConstraints& video_constraints,
    blink::WebMediaStream* web_stream,
    const MediaSourcesCreatedCallback& sources_created) {
  DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
  if (!EnsurePeerConnectionFactory()) {
    sources_created.Run(web_stream, false);
    return;
  }

  // |source_observer| clean up itself when it has completed
  // source_observer->StartObservering.
  SourceStateObserver* source_observer =
      new SourceStateObserver(web_stream, sources_created);

  // Create local video sources.
  RTCMediaConstraints native_video_constraints(video_constraints);
  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  web_stream->videoTracks(video_tracks);
  for (size_t i = 0; i < video_tracks.size(); ++i) {
    const blink::WebMediaStreamSource& source = video_tracks[i].source();
    MediaStreamSourceExtraData* source_data =
        static_cast<MediaStreamSourceExtraData*>(source.extraData());

    // Check if the source has already been created. This happens when the same
    // source is used in multiple MediaStreams as a result of calling
    // getUserMedia.
    if (source_data->video_source())
      continue;

    // Tab and desktop capture devices are treated as screencast sources.
    const bool is_screencast =
        source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE ||
        source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
    source_data->SetVideoSource(
        CreateLocalVideoSource(source_data->device_info().session_id,
                               is_screencast,
                               &native_video_constraints).get());
    source_observer->AddSource(source_data->video_source());
  }

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  ApplyFixedAudioConstraints(&native_audio_constraints);
  blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
  web_stream->audioTracks(audio_tracks);
  for (size_t i = 0; i < audio_tracks.size(); ++i) {
    const blink::WebMediaStreamSource& source = audio_tracks[i].source();
    MediaStreamSourceExtraData* source_data =
        static_cast<MediaStreamSourceExtraData*>(source.extraData());

    // Check if the source has already been created. This happens when the same
    // source is used in multiple MediaStreams as a result of calling
    // getUserMedia.
    if (source_data->local_audio_source())
      continue;

    // TODO(xians): Create a new capturer for difference microphones when we
    // support multiple microphones. See issue crbug/262117 .
    StreamDeviceInfo device_info = source_data->device_info();
    // Per-track copy: the defaults were already merged into
    // |native_audio_constraints| above.
    RTCMediaConstraints constraints = native_audio_constraints;

    // If any platform effects are available, check them against the
    // constraints. Disable effects to match false constraints, but if a
    // constraint is true, set the constraint to false to later disable the
    // software effect.
    int effects = device_info.device.input.effects;
    if (effects != media::AudioParameters::NO_EFFECTS) {
      // NOTE: this inner |i| shadows the outer track-loop index; it only
      // iterates over kConstraintEffectMap.
      for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
        bool value;
        if (!webrtc::FindConstraint(&constraints,
                kConstraintEffectMap[i].constraint, &value, NULL) || !value) {
          // If the constraint is false, or does not exist, disable the platform
          // effect.
          effects &= ~kConstraintEffectMap[i].effect;
          DVLOG(1) << "Disabling constraint: "
                   << kConstraintEffectMap[i].constraint;
        } else if (effects & kConstraintEffectMap[i].effect) {
          // If the constraint is true, leave the platform effect enabled, and
          // set the constraint to false to later disable the software effect.
          constraints.AddMandatory(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
          DVLOG(1) << "Disabling platform effect: "
                   << kConstraintEffectMap[i].constraint;
        }
      }
      device_info.device.input.effects = effects;
    }

    scoped_refptr<WebRtcAudioCapturer> capturer(
        MaybeCreateAudioCapturer(render_view_id, device_info));
    if (!capturer.get()) {
      DLOG(WARNING) << "Failed to create the capturer for device "
                    << device_info.device.id;
      sources_created.Run(web_stream, false);
      // TODO(xians): Don't we need to check if source_observer is observing
      // something? If not, then it looks like we have a leak here.
      // OTOH, if it _is_ observing something, then the callback might
      // be called multiple times which is likely also a bug.
      return;
    }
    source_data->SetAudioCapturer(capturer);

    // Creates a LocalAudioSource object which holds audio options.
    // TODO(xians): The option should apply to the track instead of the source.
    source_data->SetLocalAudioSource(
        CreateLocalAudioSource(&constraints).get());
    source_observer->AddSource(source_data->local_audio_source());
  }

  source_observer->StartObservering();
}
    388 
    389 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
    390     blink::WebMediaStream* web_stream) {
    391   DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
    392   if (!EnsurePeerConnectionFactory()) {
    393     DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
    394     return;
    395   }
    396 
    397   std::string label = UTF16ToUTF8(web_stream->id());
    398   scoped_refptr<webrtc::MediaStreamInterface> native_stream =
    399       CreateLocalMediaStream(label);
    400   MediaStreamExtraData* extra_data =
    401       new MediaStreamExtraData(native_stream.get(), true);
    402   web_stream->setExtraData(extra_data);
    403 
    404   // Add audio tracks.
    405   blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
    406   web_stream->audioTracks(audio_tracks);
    407   for (size_t i = 0; i < audio_tracks.size(); ++i) {
    408     AddNativeMediaStreamTrack(*web_stream, audio_tracks[i]);
    409   }
    410 
    411   // Add video tracks.
    412   blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
    413   web_stream->videoTracks(video_tracks);
    414   for (size_t i = 0; i < video_tracks.size(); ++i) {
    415     AddNativeMediaStreamTrack(*web_stream, video_tracks[i]);
    416   }
    417 }
    418 
    419 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
    420     blink::WebMediaStream* web_stream,
    421     const MediaStreamExtraData::StreamStopCallback& stream_stop) {
    422   CreateNativeLocalMediaStream(web_stream);
    423 
    424   MediaStreamExtraData* extra_data =
    425      static_cast<MediaStreamExtraData*>(web_stream->extraData());
    426   extra_data->SetLocalStreamStopCallback(stream_stop);
    427 }
    428 
// Builds the native (libjingle) audio track for the Blink |track|: merges
// default audio-processing constraints, handles the WebAudio-sourced case,
// links the native track to the Blink track, and wires up the source
// provider. Returns NULL for unsupported (remote) sources.
scoped_refptr<webrtc::AudioTrackInterface>
MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  MediaStreamSourceExtraData* source_data =
      static_cast<MediaStreamSourceExtraData*>(source.extraData());

  // In the future the constraints will belong to the track itself, but
  // right now they're on the source, so we fetch them from there.
  RTCMediaConstraints track_constraints(source.constraints());

  // Apply default audio constraints that enable echo cancellation,
  // automatic gain control, noise suppression and high-pass filter.
  ApplyFixedAudioConstraints(&track_constraints);

  scoped_refptr<WebAudioCapturerSource> webaudio_source;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      webaudio_source = CreateWebAudioSource(&source, &track_constraints);
      // NOTE(review): assumes CreateWebAudioSource() populated the source's
      // extra data — |source_data| is dereferenced unconditionally below;
      // confirm it can never still be NULL here.
      source_data =
          static_cast<MediaStreamSourceExtraData*>(source.extraData());
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return NULL;
    }
  }

  std::string track_id = UTF16ToUTF8(track.id());
  scoped_refptr<WebRtcAudioCapturer> capturer;
  if (GetWebRtcAudioDevice())
    capturer = GetWebRtcAudioDevice()->GetDefaultCapturer();

  scoped_refptr<webrtc::AudioTrackInterface> audio_track(
      CreateLocalAudioTrack(track_id,
                            capturer,
                            webaudio_source.get(),
                            source_data->local_audio_source(),
                            &track_constraints));
  // Associate the native track with the Blink track (as a local track).
  AddNativeTrackToBlinkTrack(audio_track.get(), track, true);

  audio_track->set_enabled(track.isEnabled());

  // Pass the pointer of the source provider to the blink audio track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>(
      audio_track.get())->audio_source_provider());

  return audio_track;
}
    483 
    484 scoped_refptr<webrtc::VideoTrackInterface>
    485 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack(
    486     const blink::WebMediaStreamTrack& track) {
    487   blink::WebMediaStreamSource source = track.source();
    488   DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo);
    489   MediaStreamSourceExtraData* source_data =
    490       static_cast<MediaStreamSourceExtraData*>(source.extraData());
    491 
    492   if (!source_data) {
    493     // TODO(perkj): Implement support for sources from
    494     // remote MediaStreams.
    495     NOTIMPLEMENTED();
    496     return NULL;
    497   }
    498 
    499   std::string track_id = UTF16ToUTF8(track.id());
    500   scoped_refptr<webrtc::VideoTrackInterface> video_track(
    501       CreateLocalVideoTrack(track_id, source_data->video_source()));
    502   AddNativeTrackToBlinkTrack(video_track.get(), track, true);
    503 
    504   video_track->set_enabled(track.isEnabled());
    505 
    506   return video_track;
    507 }
    508 
    509 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack(
    510     const blink::WebMediaStreamTrack& track) {
    511   DCHECK(!track.isNull() && !track.extraData());
    512   DCHECK(!track.source().isNull());
    513 
    514   switch (track.source().type()) {
    515     case blink::WebMediaStreamSource::TypeAudio:
    516       CreateNativeAudioMediaStreamTrack(track);
    517       break;
    518     case blink::WebMediaStreamSource::TypeVideo:
    519       CreateNativeVideoMediaStreamTrack(track);
    520       break;
    521   }
    522 }
    523 
    524 bool MediaStreamDependencyFactory::AddNativeMediaStreamTrack(
    525     const blink::WebMediaStream& stream,
    526     const blink::WebMediaStreamTrack& track) {
    527   webrtc::MediaStreamInterface* native_stream = GetNativeMediaStream(stream);
    528   DCHECK(native_stream);
    529 
    530   switch (track.source().type()) {
    531     case blink::WebMediaStreamSource::TypeAudio: {
    532       scoped_refptr<webrtc::AudioTrackInterface> native_audio_track;
    533       if (!track.extraData()) {
    534         native_audio_track = CreateNativeAudioMediaStreamTrack(track);
    535       } else {
    536         native_audio_track = static_cast<webrtc::AudioTrackInterface*>(
    537             GetNativeMediaStreamTrack(track));
    538       }
    539 
    540       return native_audio_track.get() &&
    541           native_stream->AddTrack(native_audio_track);
    542     }
    543     case blink::WebMediaStreamSource::TypeVideo: {
    544       scoped_refptr<webrtc::VideoTrackInterface> native_video_track;
    545       if (!track.extraData()) {
    546         native_video_track = CreateNativeVideoMediaStreamTrack(track);
    547       } else {
    548         native_video_track = static_cast<webrtc::VideoTrackInterface*>(
    549             GetNativeMediaStreamTrack(track));
    550       }
    551 
    552       return native_video_track.get() &&
    553           native_stream->AddTrack(native_video_track);
    554     }
    555   }
    556   return false;
    557 }
    558 
    559 bool MediaStreamDependencyFactory::AddNativeVideoMediaTrack(
    560     const std::string& track_id,
    561     blink::WebMediaStream* stream,
    562     cricket::VideoCapturer* capturer) {
    563   if (!stream) {
    564     LOG(ERROR) << "AddNativeVideoMediaTrack called with null WebMediaStream.";
    565     return false;
    566   }
    567 
    568   // Create native track from the source.
    569   scoped_refptr<webrtc::VideoTrackInterface> native_track =
    570       CreateLocalVideoTrack(track_id, capturer);
    571 
    572   // Add the native track to native stream
    573   webrtc::MediaStreamInterface* native_stream =
    574       GetNativeMediaStream(*stream);
    575   DCHECK(native_stream);
    576   native_stream->AddTrack(native_track.get());
    577 
    578   // Create a new webkit video track.
    579   blink::WebMediaStreamTrack webkit_track;
    580   blink::WebMediaStreamSource webkit_source;
    581   blink::WebString webkit_track_id(UTF8ToUTF16(track_id));
    582   blink::WebMediaStreamSource::Type type =
    583       blink::WebMediaStreamSource::TypeVideo;
    584   webkit_source.initialize(webkit_track_id, type, webkit_track_id);
    585 
    586   webkit_track.initialize(webkit_track_id, webkit_source);
    587   AddNativeTrackToBlinkTrack(native_track.get(), webkit_track, true);
    588 
    589   // Add the track to WebMediaStream.
    590   stream->addTrack(webkit_track);
    591   return true;
    592 }
    593 
    594 bool MediaStreamDependencyFactory::RemoveNativeMediaStreamTrack(
    595     const blink::WebMediaStream& stream,
    596     const blink::WebMediaStreamTrack& track) {
    597   MediaStreamExtraData* extra_data =
    598       static_cast<MediaStreamExtraData*>(stream.extraData());
    599   webrtc::MediaStreamInterface* native_stream = extra_data->stream().get();
    600   DCHECK(native_stream);
    601   std::string track_id = UTF16ToUTF8(track.id());
    602   switch (track.source().type()) {
    603     case blink::WebMediaStreamSource::TypeAudio:
    604       return native_stream->RemoveTrack(
    605           native_stream->FindAudioTrack(track_id));
    606     case blink::WebMediaStreamSource::TypeVideo:
    607       return native_stream->RemoveTrack(
    608           native_stream->FindVideoTrack(track_id));
    609   }
    610   return false;
    611 }
    612 
// Constructs the libjingle PeerConnectionFactory together with the WebRTC
// audio device and, where available, hardware video codec factories.
// Must not be called when a factory already exists. Returns false when the
// factory could not be created.
bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
  DCHECK(!pc_factory_.get());
  DCHECK(!audio_device_.get());
  DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";

  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories =
      RenderThreadImpl::current()->GetGpuFactories();
#if !defined(GOOGLE_TV)
  // Hardware decoding only when GPU factories exist and it is not disabled
  // on the command line.
  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) {
    if (gpu_factories)
      decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
  }
#else
  // PeerConnectionFactory will hold the ownership of this
  // VideoDecoderFactory.
  decoder_factory.reset(decoder_factory_tv_ = new RTCVideoDecoderFactoryTv());
#endif

  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding)) {
    if (gpu_factories)
      encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
  }

#if defined(OS_ANDROID)
  // Hardware encoding requires MediaCodec with SetParameters support; drop
  // the encoder factory otherwise.
  if (!media::MediaCodecBridge::IsAvailable() ||
      !media::MediaCodecBridge::SupportsSetParameters()) {
    encoder_factory.reset();
  }
#endif

  scoped_refptr<WebRtcAudioDeviceImpl> audio_device(
      new WebRtcAudioDeviceImpl());

  // The codec factories are released into (and owned by) the peer
  // connection factory.
  scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory(worker_thread_,
                                          signaling_thread_,
                                          audio_device.get(),
                                          encoder_factory.release(),
                                          decoder_factory.release()));
  if (!factory.get()) {
    return false;
  }

  audio_device_ = audio_device;
  pc_factory_ = factory;
  // Translate command-line switches into factory-wide options.
  webrtc::PeerConnectionFactoryInterface::Options factory_options;
  factory_options.enable_aec_dump =
      cmd_line->HasSwitch(switches::kEnableWebRtcAecRecordings);
  factory_options.disable_sctp_data_channels =
      cmd_line->HasSwitch(switches::kDisableSCTPDataChannels);
  factory_options.disable_encryption =
      cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
  pc_factory_->SetOptions(factory_options);
  return true;
}
    672 
// Returns true once CreatePeerConnectionFactory() has succeeded.
bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}
    676 
    677 scoped_refptr<webrtc::PeerConnectionInterface>
    678 MediaStreamDependencyFactory::CreatePeerConnection(
    679     const webrtc::PeerConnectionInterface::IceServers& ice_servers,
    680     const webrtc::MediaConstraintsInterface* constraints,
    681     blink::WebFrame* web_frame,
    682     webrtc::PeerConnectionObserver* observer) {
    683   CHECK(web_frame);
    684   CHECK(observer);
    685 
    686   scoped_refptr<P2PPortAllocatorFactory> pa_factory =
    687         new talk_base::RefCountedObject<P2PPortAllocatorFactory>(
    688             p2p_socket_dispatcher_.get(),
    689             network_manager_,
    690             socket_factory_.get(),
    691             web_frame);
    692 
    693   PeerConnectionIdentityService* identity_service =
    694       new PeerConnectionIdentityService(
    695           GURL(web_frame->document().url().spec()).GetOrigin());
    696 
    697   return pc_factory_->CreatePeerConnection(ice_servers,
    698                                            constraints,
    699                                            pa_factory.get(),
    700                                            identity_service,
    701                                            observer).get();
    702 }
    703 
    704 scoped_refptr<webrtc::MediaStreamInterface>
    705 MediaStreamDependencyFactory::CreateLocalMediaStream(
    706     const std::string& label) {
    707   return pc_factory_->CreateLocalMediaStream(label).get();
    708 }
    709 
    710 scoped_refptr<webrtc::AudioSourceInterface>
    711 MediaStreamDependencyFactory::CreateLocalAudioSource(
    712     const webrtc::MediaConstraintsInterface* constraints) {
    713   scoped_refptr<webrtc::AudioSourceInterface> source =
    714       pc_factory_->CreateAudioSource(constraints).get();
    715   return source;
    716 }
    717 
    718 scoped_refptr<webrtc::VideoSourceInterface>
    719 MediaStreamDependencyFactory::CreateLocalVideoSource(
    720     int video_session_id,
    721     bool is_screencast,
    722     const webrtc::MediaConstraintsInterface* constraints) {
    723   RtcVideoCapturer* capturer = new RtcVideoCapturer(
    724       video_session_id, vc_manager_.get(), is_screencast);
    725 
    726   // The video source takes ownership of |capturer|.
    727   scoped_refptr<webrtc::VideoSourceInterface> source =
    728       pc_factory_->CreateVideoSource(capturer, constraints).get();
    729   return source;
    730 }
    731 
    732 scoped_refptr<WebAudioCapturerSource>
    733 MediaStreamDependencyFactory::CreateWebAudioSource(
    734     blink::WebMediaStreamSource* source,
    735     RTCMediaConstraints* constraints) {
    736   DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()";
    737   DCHECK(GetWebRtcAudioDevice());
    738 
    739   scoped_refptr<WebAudioCapturerSource>
    740       webaudio_capturer_source(new WebAudioCapturerSource());
    741   MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData();
    742 
    743   // Create a LocalAudioSource object which holds audio options.
    744   // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
    745   source_data->SetLocalAudioSource(CreateLocalAudioSource(constraints).get());
    746   source->setExtraData(source_data);
    747 
    748   // Replace the default source with WebAudio as source instead.
    749   source->addAudioConsumer(webaudio_capturer_source.get());
    750 
    751   return webaudio_capturer_source;
    752 }
    753 
    754 scoped_refptr<webrtc::VideoTrackInterface>
    755 MediaStreamDependencyFactory::CreateLocalVideoTrack(
    756     const std::string& id,
    757     webrtc::VideoSourceInterface* source) {
    758   return pc_factory_->CreateVideoTrack(id, source).get();
    759 }
    760 
    761 scoped_refptr<webrtc::VideoTrackInterface>
    762 MediaStreamDependencyFactory::CreateLocalVideoTrack(
    763     const std::string& id, cricket::VideoCapturer* capturer) {
    764   if (!capturer) {
    765     LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
    766     return NULL;
    767   }
    768 
    769   // Create video source from the |capturer|.
    770   scoped_refptr<webrtc::VideoSourceInterface> source =
    771       pc_factory_->CreateVideoSource(capturer, NULL).get();
    772 
    773   // Create native track from the source.
    774   return pc_factory_->CreateVideoTrack(id, source.get()).get();
    775 }
    776 
    777 scoped_refptr<webrtc::AudioTrackInterface>
    778 MediaStreamDependencyFactory::CreateLocalAudioTrack(
    779     const std::string& id,
    780     const scoped_refptr<WebRtcAudioCapturer>& capturer,
    781     WebAudioCapturerSource* webaudio_source,
    782     webrtc::AudioSourceInterface* source,
    783     const webrtc::MediaConstraintsInterface* constraints) {
    784   // TODO(xians): Merge |source| to the capturer(). We can't do this today
    785   // because only one capturer() is supported while one |source| is created
    786   // for each audio track.
    787   scoped_refptr<WebRtcLocalAudioTrack> audio_track(
    788       WebRtcLocalAudioTrack::Create(id, capturer, webaudio_source,
    789                                     source, constraints));
    790 
    791   // Add the WebRtcAudioDevice as the sink to the local audio track.
    792   audio_track->AddSink(GetWebRtcAudioDevice());
    793   // Start the audio track. This will hook the |audio_track| to the capturer
    794   // as the sink of the audio, and only start the source of the capturer if
    795   // it is the first audio track connecting to the capturer.
    796   audio_track->Start();
    797   return audio_track;
    798 }
    799 
    800 webrtc::SessionDescriptionInterface*
    801 MediaStreamDependencyFactory::CreateSessionDescription(
    802     const std::string& type,
    803     const std::string& sdp,
    804     webrtc::SdpParseError* error) {
    805   return webrtc::CreateSessionDescription(type, sdp, error);
    806 }
    807 
    808 webrtc::IceCandidateInterface* MediaStreamDependencyFactory::CreateIceCandidate(
    809     const std::string& sdp_mid,
    810     int sdp_mline_index,
    811     const std::string& sdp) {
    812   return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
    813 }
    814 
// Returns the WebRtcAudioDeviceImpl created by CreatePeerConnectionFactory(),
// or NULL if the factory has not been successfully created yet.
WebRtcAudioDeviceImpl*
MediaStreamDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}
    819 
// Runs on |chrome_worker_thread_|. Wraps its message loop in a
// JingleThreadWrapper so libjingle can use it as the worker thread, publishes
// the wrapper through |thread|, then signals |event| to release the caller
// blocked in EnsurePeerConnectionFactory().
void MediaStreamDependencyFactory::InitializeWorkerThread(
    talk_base::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  // Allow synchronous Send() on this wrapper; libjingle relies on it.
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  // Signal last: the waiting thread reads |*thread| immediately afterwards.
  event->Signal();
}
    828 
// Runs on |chrome_worker_thread_|. Creates the IpcNetworkManager used for
// port allocation, then signals |event| to release the caller blocked in
// EnsurePeerConnectionFactory().
void MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  // Raw owning pointer; it must later be deleted on this same thread via
  // DeleteIpcNetworkManager().
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}
    835 
// Runs on |chrome_worker_thread_|. Deletes |network_manager_| on the thread
// it was created on (see CreateIpcNetworkManagerOnWorkerThread()).
void MediaStreamDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}
    841 
// Lazily brings up everything PeerConnection creation depends on: the
// signaling thread (this thread), the libjingle worker thread, the network
// manager, the packet socket factory, SSL, and finally the
// PeerConnectionFactory itself. Idempotent; returns false on any failure.
bool MediaStreamDependencyFactory::EnsurePeerConnectionFactory() {
  DCHECK(CalledOnValidThread());
  if (PeerConnectionFactoryCreated())
    return true;

  if (!signaling_thread_) {
    // The current (render) thread doubles as libjingle's signaling thread.
    jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
    jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
    signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
    CHECK(signaling_thread_);
  }

  if (!worker_thread_) {
    if (!chrome_worker_thread_.IsRunning()) {
      if (!chrome_worker_thread_.Start()) {
        LOG(ERROR) << "Could not start worker thread";
        signaling_thread_ = NULL;
        return false;
      }
    }
    // Block until the worker thread has wrapped itself in a
    // JingleThreadWrapper and published the pointer into |worker_thread_|.
    base::WaitableEvent event(true, false);
    chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &MediaStreamDependencyFactory::InitializeWorkerThread,
        base::Unretained(this),
        &worker_thread_,
        &event));
    event.Wait();
    DCHECK(worker_thread_);
  }

  if (!network_manager_) {
    // The network manager must be created (and later deleted) on the worker
    // thread; block until that has happened.
    base::WaitableEvent event(true, false);
    chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
        base::Unretained(this),
        &event));
    event.Wait();
  }

  if (!socket_factory_) {
    socket_factory_.reset(
        new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));
  }

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!talk_base::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    return false;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  if (!CreatePeerConnectionFactory()) {
    LOG(ERROR) << "Could not create PeerConnection factory";
    return false;
  }
  return true;
}
    903 
// Drops the PeerConnectionFactory and tears down the network manager on the
// worker thread (its creation thread), then stops that thread.
void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread they were
    // created on, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &MediaStreamDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been
      // processed before returning. We wait for the above task to finish before
      // letting the function continue to avoid any potential race issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}
    922 
// Returns the WebRtcAudioDevice's default capturer, creating and registering
// a new one if none exists yet, (re)initialized with |device_info|'s audio
// parameters. Returns NULL if initialization fails.
scoped_refptr<WebRtcAudioCapturer>
MediaStreamDependencyFactory::MaybeCreateAudioCapturer(
    int render_view_id,
    const StreamDeviceInfo& device_info) {
  // TODO(xians): Handle the cases when gUM is called without a proper render
  // view, for example, by an extension.
  DCHECK_GE(render_view_id, 0);

  scoped_refptr<WebRtcAudioCapturer> capturer =
      GetWebRtcAudioDevice()->GetDefaultCapturer();

  // If the default capturer does not exist, create a new one.
  // NOTE(review): an earlier comment also mentioned |render_view_id| == -1,
  // but the DCHECK above rules that case out.
  bool is_new_capturer = false;
  if (!capturer.get()) {
    capturer = WebRtcAudioCapturer::CreateCapturer();
    is_new_capturer = true;
  }

  // Configure the capturer with the device's input/output parameters; give
  // up entirely if it rejects them.
  if (!capturer->Initialize(
          render_view_id,
          static_cast<media::ChannelLayout>(
              device_info.device.input.channel_layout),
          device_info.device.input.sample_rate,
          device_info.device.input.frames_per_buffer,
          device_info.session_id,
          device_info.device.id,
          device_info.device.matched_output.sample_rate,
          device_info.device.matched_output.frames_per_buffer,
          device_info.device.input.effects)) {
    return NULL;
  }

  // Add the capturer to the WebRtcAudioDeviceImpl if it is a new capturer.
  if (is_new_capturer)
    GetWebRtcAudioDevice()->AddAudioCapturer(capturer);

  return capturer;
}
    962 
    963 void MediaStreamDependencyFactory::AddNativeTrackToBlinkTrack(
    964     webrtc::MediaStreamTrackInterface* native_track,
    965     const blink::WebMediaStreamTrack& webkit_track,
    966     bool is_local_track) {
    967   DCHECK(!webkit_track.isNull() && !webkit_track.extraData());
    968   blink::WebMediaStreamTrack track = webkit_track;
    969 
    970   if (track.source().type() == blink::WebMediaStreamSource::TypeVideo) {
    971     track.setExtraData(new MediaStreamVideoTrack(
    972         static_cast<webrtc::VideoTrackInterface*>(native_track),
    973         is_local_track));
    974   } else {
    975     track.setExtraData(new MediaStreamTrackExtraData(native_track,
    976                                                      is_local_track));
    977   }
    978 }
    979 
    980 webrtc::MediaStreamInterface*
    981 MediaStreamDependencyFactory::GetNativeMediaStream(
    982     const blink::WebMediaStream& stream) {
    983   if (stream.isNull())
    984     return NULL;
    985   MediaStreamExtraData* extra_data =
    986       static_cast<MediaStreamExtraData*>(stream.extraData());
    987   return extra_data ? extra_data->stream().get() : NULL;
    988 }
    989 
    990 webrtc::MediaStreamTrackInterface*
    991 MediaStreamDependencyFactory::GetNativeMediaStreamTrack(
    992       const blink::WebMediaStreamTrack& track) {
    993   if (track.isNull())
    994     return NULL;
    995   MediaStreamTrackExtraData* extra_data =
    996       static_cast<MediaStreamTrackExtraData*>(track.extraData());
    997   return extra_data ? extra_data->track().get() : NULL;
    998 }
    999 
   1000 }  // namespace content
   1001