// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/media_stream_center.h"

#include <string>

#include "base/command_line.h"
#include "base/logging.h"
#include "content/common/media/media_stream_messages.h"
#include "content/public/common/content_switches.h"
#include "content/public/renderer/media_stream_audio_sink.h"
#include "content/public/renderer/render_thread.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_source.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
#include "content/renderer/media/webrtc_local_audio_source_provider.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamCenterClient.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrackSourcesRequest.h"
#include "third_party/WebKit/public/platform/WebSourceInfo.h"
#include "third_party/WebKit/public/platform/WebString.h"
#include "third_party/WebKit/public/platform/WebVector.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "url/gurl.h"

using blink::WebFrame;
using blink::WebView;

namespace content {

namespace {

// Creates the native local audio track for |track| using |factory|.
void CreateNativeAudioMediaStreamTrack(
    const blink::WebMediaStreamTrack& track,
    PeerConnectionDependencyFactory* factory) {
  DCHECK(!track.extraData());
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  factory->CreateLocalAudioTrack(track);
}

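// Attaches a native MediaStreamVideoTrack to |track|, connecting it to the
// MediaStreamVideoSource that already backs |track.source()|. The native
// source is expected to exist before this is called.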
void CreateNativeVideoMediaStreamTrack(
    const blink::WebMediaStreamTrack& track) {
  DCHECK(track.extraData() == NULL);
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo);
  MediaStreamVideoSource* native_source =
      MediaStreamVideoSource::GetVideoSource(source);
  DCHECK(native_source);
  blink::WebMediaStreamTrack writable_track(track);
  // TODO(perkj): The constraints to use here should be passed from blink when
  // a new track is created. For cloning, it should be the constraints of the
  // cloned track and not the originating source.
  // Also - source.constraints() returns an uninitialized constraint if the
  // source is coming from a remote video track. See http://crbug/287805.
  blink::WebMediaConstraints constraints = source.constraints();
  if (constraints.isNull())
    constraints.initialize();
  writable_track.setExtraData(
      new MediaStreamVideoTrack(native_source, constraints,
                                MediaStreamVideoSource::ConstraintsCallback(),
                                track.isEnabled()));
}

// Creates the native track implementation for |track| based on the type of
// its source.
void CreateNativeMediaStreamTrack(const blink::WebMediaStreamTrack& track,
                                  PeerConnectionDependencyFactory* factory) {
  DCHECK(!track.isNull() && !track.extraData());
  DCHECK(!track.source().isNull());

  switch (track.source().type()) {
    case blink::WebMediaStreamSource::TypeAudio:
      CreateNativeAudioMediaStreamTrack(track, factory);
      break;
    case blink::WebMediaStreamSource::TypeVideo:
      CreateNativeVideoMediaStreamTrack(track);
      break;
  }
}

}  // namespace

MediaStreamCenter::MediaStreamCenter(blink::WebMediaStreamCenterClient* client,
                                     PeerConnectionDependencyFactory* factory)
    : rtc_factory_(factory), next_request_id_(0) {}

MediaStreamCenter::~MediaStreamCenter() {}

bool MediaStreamCenter::getMediaStreamTrackSources(
    const blink::WebMediaStreamTrackSourcesRequest& request) {
  int request_id = next_request_id_++;
  requests_.insert(std::make_pair(request_id, request));
  RenderThread::Get()->Send(new MediaStreamHostMsg_GetSources(
      request_id, GURL(request.origin().utf8())));
  return true;
}

void MediaStreamCenter::didCreateMediaStreamTrack(
    const blink::WebMediaStreamTrack& track) {
  DVLOG(1) << "MediaStreamCenter::didCreateMediaStreamTrack";
  CreateNativeMediaStreamTrack(track, rtc_factory_);
}

void MediaStreamCenter::didEnableMediaStreamTrack(
    const blink::WebMediaStreamTrack& track) {
  MediaStreamTrack* native_track = MediaStreamTrack::GetTrack(track);
  if (native_track)
    native_track->SetEnabled(true);
}

void MediaStreamCenter::didDisableMediaStreamTrack(
    const blink::WebMediaStreamTrack& track) {
  MediaStreamTrack* native_track = MediaStreamTrack::GetTrack(track);
  if (native_track)
    native_track->SetEnabled(false);
}

bool MediaStreamCenter::didStopMediaStreamTrack(
    const blink::WebMediaStreamTrack& track) {
  DVLOG(1) << "MediaStreamCenter::didStopMediaStreamTrack";
  MediaStreamTrack* native_track = MediaStreamTrack::GetTrack(track);
  native_track->Stop();
  return true;
}

blink::WebAudioSourceProvider*
MediaStreamCenter::createWebAudioSourceFromMediaStreamTrack(
    const blink::WebMediaStreamTrack& track) {
  DVLOG(1) << "MediaStreamCenter::createWebAudioSourceFromMediaStreamTrack";
  MediaStreamTrack* media_stream_track =
      static_cast<MediaStreamTrack*>(track.extraData());
  // Only local audio tracks are supported now.
  // TODO(xians): Support remote audio tracks.
  if (!media_stream_track || !media_stream_track->is_local_track()) {
    NOTIMPLEMENTED();
    return NULL;
  }

  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  WebRtcLocalAudioSourceProvider* source_provider =
      new WebRtcLocalAudioSourceProvider(track);
  return source_provider;
}

void MediaStreamCenter::didStopLocalMediaStream(
    const blink::WebMediaStream& stream) {
  DVLOG(1) << "MediaStreamCenter::didStopLocalMediaStream";
  MediaStream* native_stream = MediaStream::GetMediaStream(stream);
  if (!native_stream) {
    NOTREACHED();
    return;
  }

  // TODO(perkj): MediaStream::Stop is being deprecated. But for the moment we
  // need to support both MediaStream::Stop and MediaStreamTrack::Stop.
  blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
  stream.audioTracks(audio_tracks);
  for (size_t i = 0; i < audio_tracks.size(); ++i)
    didStopMediaStreamTrack(audio_tracks[i]);

  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  stream.videoTracks(video_tracks);
  for (size_t i = 0; i < video_tracks.size(); ++i)
    didStopMediaStreamTrack(video_tracks[i]);
}

void MediaStreamCenter::didCreateMediaStream(blink::WebMediaStream& stream) {
  DVLOG(1) << "MediaStreamCenter::didCreateMediaStream";
  blink::WebMediaStream writable_stream(stream);
  MediaStream* native_stream = new MediaStream(stream);
  writable_stream.setExtraData(native_stream);

  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  stream.videoTracks(video_tracks);
  for (size_t i = 0; i < video_tracks.size(); ++i) {
    if (!MediaStreamTrack::GetTrack(video_tracks[i]))
      CreateNativeMediaStreamTrack(video_tracks[i], rtc_factory_);
  }
}

bool MediaStreamCenter::didAddMediaStreamTrack(
    const blink::WebMediaStream& stream,
    const blink::WebMediaStreamTrack& track) {
  DVLOG(1) << "MediaStreamCenter::didAddMediaStreamTrack";
  MediaStream* native_stream = MediaStream::GetMediaStream(stream);
  return native_stream->AddTrack(track);
}

bool MediaStreamCenter::didRemoveMediaStreamTrack(
    const blink::WebMediaStream& stream,
    const blink::WebMediaStreamTrack& track) {
  DVLOG(1) << "MediaStreamCenter::didRemoveMediaStreamTrack";
  MediaStream* native_stream = MediaStream::GetMediaStream(stream);
  return native_stream->RemoveTrack(track);
}

bool MediaStreamCenter::OnControlMessageReceived(const IPC::Message& message) {
  bool handled = true;
  IPC_BEGIN_MESSAGE_MAP(MediaStreamCenter, message)
    IPC_MESSAGE_HANDLER(MediaStreamMsg_GetSourcesACK, OnGetSourcesComplete)
    IPC_MESSAGE_UNHANDLED(handled = false)
  IPC_END_MESSAGE_MAP()
  return handled;
}

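// Handles the browser's MediaStreamMsg_GetSourcesACK reply: translates each
// StreamDeviceInfo into a blink::WebSourceInfo and completes the pending
// WebMediaStreamTrackSourcesRequest stored in |requests_|.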
void MediaStreamCenter::OnGetSourcesComplete(
    int request_id,
    const content::StreamDeviceInfoArray& devices) {
  RequestMap::iterator request_it = requests_.find(request_id);
  DCHECK(request_it != requests_.end());

  blink::WebVector<blink::WebSourceInfo> sourceInfos(devices.size());
  for (size_t i = 0; i < devices.size(); ++i) {
    const MediaStreamDevice& device = devices[i].device;
    DCHECK(device.type == MEDIA_DEVICE_AUDIO_CAPTURE ||
           device.type == MEDIA_DEVICE_VIDEO_CAPTURE);
    blink::WebSourceInfo::VideoFacingMode video_facing;
    switch (device.video_facing) {
      case MEDIA_VIDEO_FACING_USER:
        video_facing = blink::WebSourceInfo::VideoFacingModeUser;
        break;
      case MEDIA_VIDEO_FACING_ENVIRONMENT:
        video_facing = blink::WebSourceInfo::VideoFacingModeEnvironment;
        break;
      default:
        video_facing = blink::WebSourceInfo::VideoFacingModeNone;
    }

    sourceInfos[i].initialize(blink::WebString::fromUTF8(device.id),
                              device.type == MEDIA_DEVICE_AUDIO_CAPTURE
                                  ? blink::WebSourceInfo::SourceKindAudio
                                  : blink::WebSourceInfo::SourceKindVideo,
                              blink::WebString::fromUTF8(device.name),
                              video_facing);
  }
  request_it->second.requestSucceeded(sourceInfos);
}

}  // namespace content