/*
 * libjingle
 * Copyright 2012, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/app/webrtc/mediastreamsignaling.h"

#include <vector>

#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/mediastreamproxy.h"
#include "talk/app/webrtc/mediastreamtrackproxy.h"
#include "talk/app/webrtc/remoteaudiosource.h"
#include "talk/app/webrtc/remotevideocapturer.h"
#include "talk/app/webrtc/sctputils.h"
#include "talk/app/webrtc/videosource.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/media/sctp/sctpdataengine.h"
#include "webrtc/base/bytebuffer.h"
#include "webrtc/base/stringutils.h"

static const char kDefaultStreamLabel[] = "default";
static const char kDefaultAudioTrackLabel[] = "defaulta0";
static const char kDefaultVideoTrackLabel[] = "defaultv0";

namespace webrtc {

using rtc::scoped_ptr;
using rtc::scoped_refptr;

static bool ParseConstraintsForAnswer(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* options) {
  bool value;
  size_t mandatory_constraints_satisfied = 0;

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kOfferToReceiveAudio,
                     &value, &mandatory_constraints_satisfied)) {
    // |options->has_audio| can only change from false to
    // true, but never change from true to false. This is to make sure
    // CreateOffer / CreateAnswer doesn't remove a media content
    // description that has been created.
    options->has_audio |= value;
  } else {
    // kOfferToReceiveAudio defaults to true according to spec.
    options->has_audio = true;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kOfferToReceiveVideo,
                     &value, &mandatory_constraints_satisfied)) {
    // |options->has_video| can only change from false to
    // true, but never change from true to false. This is to make sure
    // CreateOffer / CreateAnswer doesn't remove a media content
    // description that has been created.
    options->has_video |= value;
  } else {
    // kOfferToReceiveVideo defaults to false according to spec. But
    // if it is an answer and video is offered, we should still accept video
    // per default.
    options->has_video = true;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kVoiceActivityDetection,
                     &value, &mandatory_constraints_satisfied)) {
    options->vad_enabled = value;
  }

  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kUseRtpMux,
                     &value, &mandatory_constraints_satisfied)) {
    options->bundle_enabled = value;
  } else {
    // kUseRtpMux defaults to true according to spec.
    options->bundle_enabled = true;
  }
  if (FindConstraint(constraints,
                     MediaConstraintsInterface::kIceRestart,
                     &value, &mandatory_constraints_satisfied)) {
    options->transport_options.ice_restart = value;
  } else {
    // kIceRestart defaults to false according to spec.
    options->transport_options.ice_restart = false;
  }

  if (!constraints) {
    return true;
  }
  return mandatory_constraints_satisfied == constraints->GetMandatory().size();
}

// Returns true if at least one media content is present and
// |options.bundle_enabled| is true.
// Bundle will be enabled by default if at least one media content is present
// and the constraint kUseRtpMux has not disabled bundle.
static bool EvaluateNeedForBundle(const cricket::MediaSessionOptions& options) {
  return options.bundle_enabled &&
      (options.has_audio || options.has_video || options.has_data());
}

static bool MediaContentDirectionHasSend(cricket::MediaContentDirection dir) {
  return dir == cricket::MD_SENDONLY || dir == cricket::MD_SENDRECV;
}

static bool IsValidOfferToReceiveMedia(int value) {
  typedef PeerConnectionInterface::RTCOfferAnswerOptions Options;
  return (value >= Options::kUndefined) &&
      (value <= Options::kMaxOfferToReceiveMedia);
}

// Add the stream and RTP data channel info to |session_options|.
static void SetStreams(
    cricket::MediaSessionOptions* session_options,
    rtc::scoped_refptr<StreamCollection> streams,
    const MediaStreamSignaling::RtpDataChannels& rtp_data_channels) {
  session_options->streams.clear();
  if (streams != NULL) {
    for (size_t i = 0; i < streams->count(); ++i) {
      MediaStreamInterface* stream = streams->at(i);

      AudioTrackVector audio_tracks(stream->GetAudioTracks());

      // For each audio track in the stream, add it to the MediaSessionOptions.
      for (size_t j = 0; j < audio_tracks.size(); ++j) {
        scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
        session_options->AddStream(
            cricket::MEDIA_TYPE_AUDIO, track->id(), stream->label());
      }

      VideoTrackVector video_tracks(stream->GetVideoTracks());

      // For each video track in the stream, add it to the MediaSessionOptions.
      for (size_t j = 0; j < video_tracks.size(); ++j) {
        scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
        session_options->AddStream(
            cricket::MEDIA_TYPE_VIDEO, track->id(), stream->label());
      }
    }
  }

  // Check for data channels.
  MediaStreamSignaling::RtpDataChannels::const_iterator data_channel_it =
      rtp_data_channels.begin();
  for (; data_channel_it != rtp_data_channels.end(); ++data_channel_it) {
    const DataChannel* channel = data_channel_it->second;
    if (channel->state() == DataChannel::kConnecting ||
        channel->state() == DataChannel::kOpen) {
      // |streamid| and |sync_label| are both set to the DataChannel label
      // here so they can be signaled the same way as MediaStreams and Tracks.
      // For MediaStreams, the sync_label is the MediaStream label and the
      // track label is the same as |streamid|.
      const std::string& streamid = channel->label();
      const std::string& sync_label = channel->label();
      session_options->AddStream(
          cricket::MEDIA_TYPE_DATA, streamid, sync_label);
    }
  }
}

// Factory class for creating remote MediaStreams and MediaStreamTracks.
class RemoteMediaStreamFactory {
 public:
  explicit RemoteMediaStreamFactory(rtc::Thread* signaling_thread,
                                    cricket::ChannelManager* channel_manager)
      : signaling_thread_(signaling_thread),
        channel_manager_(channel_manager) {
  }

  rtc::scoped_refptr<MediaStreamInterface> CreateMediaStream(
      const std::string& stream_label) {
    return MediaStreamProxy::Create(
        signaling_thread_, MediaStream::Create(stream_label));
  }

  AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
        stream, track_id, RemoteAudioSource::Create().get());
  }

  VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(
        stream, track_id, VideoSource::Create(channel_manager_,
                                              new RemoteVideoCapturer(),
                                              NULL).get());
  }

 private:
  template <typename TI, typename T, typename TP, typename S>
  TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id,
               S* source) {
    rtc::scoped_refptr<TI> track(
        TP::Create(signaling_thread_, T::Create(track_id, source)));
    track->set_state(webrtc::MediaStreamTrackInterface::kLive);
    if (stream->AddTrack(track)) {
      return track;
    }
    return NULL;
  }

  rtc::Thread* signaling_thread_;
  cricket::ChannelManager* channel_manager_;
};

MediaStreamSignaling::MediaStreamSignaling(
    rtc::Thread* signaling_thread,
    MediaStreamSignalingObserver* stream_observer,
    cricket::ChannelManager* channel_manager)
    : signaling_thread_(signaling_thread),
      data_channel_factory_(NULL),
      stream_observer_(stream_observer),
      local_streams_(StreamCollection::Create()),
      remote_streams_(StreamCollection::Create()),
      remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread,
                                                          channel_manager)),
      last_allocated_sctp_even_sid_(-2),
      last_allocated_sctp_odd_sid_(-1) {
}

MediaStreamSignaling::~MediaStreamSignaling() {
}

void MediaStreamSignaling::TearDown() {
  OnAudioChannelClose();
  OnVideoChannelClose();
  OnDataChannelClose();
}

bool MediaStreamSignaling::IsSctpSidAvailable(int sid) const {
  if (sid < 0 || sid > static_cast<int>(cricket::kMaxSctpSid))
    return false;

  return FindDataChannelBySid(sid) < 0;
}

// Gets the first unused odd/even id based on the DTLS role. If |role| is
// SSL_CLIENT, the allocated id starts from 0 and takes even numbers;
// otherwise, the id starts from 1 and takes odd numbers. Returns false if no
// id can be allocated.
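// A sketch of the resulting allocation order, assuming no sids are already in
// use (e.g. taken by channels created from remote OPEN messages): the DTLS
// client (SSL_CLIENT) walks 0, 2, 4, ... and the DTLS server walks 1, 3, 5,
// ..., so the two sides never pick the same sid. Allocation fails once
// cricket::kMaxSctpSid is exceeded.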
bool MediaStreamSignaling::AllocateSctpSid(rtc::SSLRole role, int* sid) {
  int& last_id = (role == rtc::SSL_CLIENT) ?
      last_allocated_sctp_even_sid_ : last_allocated_sctp_odd_sid_;

  do {
    last_id += 2;
  } while (last_id <= static_cast<int>(cricket::kMaxSctpSid) &&
           !IsSctpSidAvailable(last_id));

  if (last_id > static_cast<int>(cricket::kMaxSctpSid)) {
    return false;
  }

  *sid = last_id;
  return true;
}

bool MediaStreamSignaling::HasDataChannels() const {
  return !rtp_data_channels_.empty() || !sctp_data_channels_.empty();
}

bool MediaStreamSignaling::AddDataChannel(DataChannel* data_channel) {
  ASSERT(data_channel != NULL);
  if (data_channel->data_channel_type() == cricket::DCT_RTP) {
    if (rtp_data_channels_.find(data_channel->label()) !=
        rtp_data_channels_.end()) {
      LOG(LS_ERROR) << "DataChannel with label " << data_channel->label()
                    << " already exists.";
      return false;
    }
    rtp_data_channels_[data_channel->label()] = data_channel;
  } else {
    ASSERT(data_channel->data_channel_type() == cricket::DCT_SCTP);
    sctp_data_channels_.push_back(data_channel);
  }
  return true;
}

bool MediaStreamSignaling::AddDataChannelFromOpenMessage(
    const cricket::ReceiveDataParams& params,
    const rtc::Buffer& payload) {
  if (!data_channel_factory_) {
    LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
                    << "are not supported.";
    return false;
  }

  std::string label;
  InternalDataChannelInit config;
  config.id = params.ssrc;
  if (!ParseDataChannelOpenMessage(payload, &label, &config)) {
    LOG(LS_WARNING) << "Failed to parse the OPEN message for sid "
                    << params.ssrc;
    return false;
  }
  config.open_handshake_role = InternalDataChannelInit::kAcker;

  scoped_refptr<DataChannel> channel(
      data_channel_factory_->CreateDataChannel(label, &config));
  if (!channel.get()) {
    LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message.";
    return false;
  }

  stream_observer_->OnAddDataChannel(channel);
  return true;
}

void MediaStreamSignaling::RemoveSctpDataChannel(int sid) {
  ASSERT(sid >= 0);
  for (SctpDataChannels::iterator iter = sctp_data_channels_.begin();
       iter != sctp_data_channels_.end();
       ++iter) {
    if ((*iter)->id() == sid) {
      sctp_data_channels_.erase(iter);

      if (rtc::IsEven(sid) && sid <= last_allocated_sctp_even_sid_) {
        last_allocated_sctp_even_sid_ = sid - 2;
      } else if (rtc::IsOdd(sid) && sid <= last_allocated_sctp_odd_sid_) {
        last_allocated_sctp_odd_sid_ = sid - 2;
      }
      return;
    }
  }
}

bool MediaStreamSignaling::AddLocalStream(MediaStreamInterface* local_stream) {
  if (local_streams_->find(local_stream->label()) != NULL) {
    LOG(LS_WARNING) << "MediaStream with label " << local_stream->label()
                    << " already exists.";
    return false;
  }
  local_streams_->AddStream(local_stream);

  // Find tracks that have already been configured in SDP. This can occur if a
  // local session description that contains the MSID of these tracks is set
  // before AddLocalStream is called. It can also occur if the local session
  // description is not changed and RemoveLocalStream
  // is called and later AddLocalStream is called again with the same stream.
  AudioTrackVector audio_tracks = local_stream->GetAudioTracks();
  for (AudioTrackVector::const_iterator it = audio_tracks.begin();
       it != audio_tracks.end(); ++it) {
    const TrackInfo* track_info = FindTrackInfo(local_audio_tracks_,
                                                local_stream->label(),
                                                (*it)->id());
    if (track_info) {
      OnLocalTrackSeen(track_info->stream_label, track_info->track_id,
                       track_info->ssrc, cricket::MEDIA_TYPE_AUDIO);
    }
  }

  VideoTrackVector video_tracks = local_stream->GetVideoTracks();
  for (VideoTrackVector::const_iterator it = video_tracks.begin();
       it != video_tracks.end(); ++it) {
    const TrackInfo* track_info = FindTrackInfo(local_video_tracks_,
                                                local_stream->label(),
                                                (*it)->id());
    if (track_info) {
      OnLocalTrackSeen(track_info->stream_label, track_info->track_id,
                       track_info->ssrc, cricket::MEDIA_TYPE_VIDEO);
    }
  }
  return true;
}

void MediaStreamSignaling::RemoveLocalStream(
    MediaStreamInterface* local_stream) {
  AudioTrackVector audio_tracks = local_stream->GetAudioTracks();
  for (AudioTrackVector::const_iterator it = audio_tracks.begin();
       it != audio_tracks.end(); ++it) {
    const TrackInfo* track_info = FindTrackInfo(local_audio_tracks_,
                                                local_stream->label(),
                                                (*it)->id());
    if (track_info) {
      stream_observer_->OnRemoveLocalAudioTrack(local_stream, *it,
                                                track_info->ssrc);
    }
  }
  VideoTrackVector video_tracks = local_stream->GetVideoTracks();
  for (VideoTrackVector::const_iterator it = video_tracks.begin();
       it != video_tracks.end(); ++it) {
    const TrackInfo* track_info = FindTrackInfo(local_video_tracks_,
                                                local_stream->label(),
                                                (*it)->id());
    if (track_info) {
      stream_observer_->OnRemoveLocalVideoTrack(local_stream, *it);
    }
  }

  local_streams_->RemoveStream(local_stream);
  stream_observer_->OnRemoveLocalStream(local_stream);
}

bool MediaStreamSignaling::GetOptionsForOffer(
    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
    cricket::MediaSessionOptions* session_options) {
  typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
  if (!IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) ||
      !IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video)) {
    return false;
  }

  session_options->has_audio = false;
  session_options->has_video = false;
  SetStreams(session_options, local_streams_, rtp_data_channels_);

  // If |offer_to_receive_[audio/video]| is undefined, respect the flags set
  // from SetStreams. Otherwise, overwrite it based on |rtc_options|.
  if (rtc_options.offer_to_receive_audio != RTCOfferAnswerOptions::kUndefined) {
    session_options->has_audio = rtc_options.offer_to_receive_audio > 0;
  }
  if (rtc_options.offer_to_receive_video != RTCOfferAnswerOptions::kUndefined) {
    session_options->has_video = rtc_options.offer_to_receive_video > 0;
  }

  session_options->vad_enabled = rtc_options.voice_activity_detection;
  session_options->transport_options.ice_restart = rtc_options.ice_restart;
  session_options->bundle_enabled = rtc_options.use_rtp_mux;

  session_options->bundle_enabled = EvaluateNeedForBundle(*session_options);
  return true;
}

bool MediaStreamSignaling::GetOptionsForAnswer(
    const MediaConstraintsInterface* constraints,
    cricket::MediaSessionOptions* options) {
  options->has_audio = false;
  options->has_video = false;
  SetStreams(options, local_streams_, rtp_data_channels_);

  if (!ParseConstraintsForAnswer(constraints, options)) {
    return false;
  }
  options->bundle_enabled = EvaluateNeedForBundle(*options);
  return true;
}

// Updates or creates remote MediaStream objects given a
// remote SessionDescription.
// If the remote SessionDescription contains new remote MediaStreams,
// the observer's OnAddStream method is called. If a remote MediaStream is
// missing from the remote SessionDescription, OnRemoveStream is called.
void MediaStreamSignaling::OnRemoteDescriptionChanged(
    const SessionDescriptionInterface* desc) {
  const cricket::SessionDescription* remote_desc = desc->description();
  rtc::scoped_refptr<StreamCollection> new_streams(
      StreamCollection::Create());

  // Find all audio rtp streams and create corresponding remote AudioTracks
  // and MediaStreams.
  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
  if (audio_content) {
    const cricket::AudioContentDescription* desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
    remote_info_.default_audio_track_needed =
        MediaContentDirectionHasSend(desc->direction()) &&
        desc->streams().empty();
  }

  // Find all video rtp streams and create corresponding remote VideoTracks
  // and MediaStreams.
  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
  if (video_content) {
    const cricket::VideoContentDescription* desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
    remote_info_.default_video_track_needed =
        MediaContentDirectionHasSend(desc->direction()) &&
        desc->streams().empty();
  }

  // Update the DataChannels with the information from the remote peer.
  const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc);
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (rtc::starts_with(
            data_desc->protocol().data(), cricket::kMediaProtocolRtpPrefix)) {
      UpdateRemoteRtpDataChannels(data_desc->streams());
    }
  }

  // Iterate new_streams and notify the observer about new MediaStreams.
  for (size_t i = 0; i < new_streams->count(); ++i) {
    MediaStreamInterface* new_stream = new_streams->at(i);
    stream_observer_->OnAddRemoteStream(new_stream);
  }

  // Find removed MediaStreams.
  if (remote_info_.IsDefaultMediaStreamNeeded() &&
      remote_streams_->find(kDefaultStreamLabel) != NULL) {
    // The default media stream already exists. No need to do anything.
  } else {
    UpdateEndedRemoteMediaStreams();
    remote_info_.msid_supported |= remote_streams_->count() > 0;
  }
  MaybeCreateDefaultStream();
}

void MediaStreamSignaling::OnLocalDescriptionChanged(
    const SessionDescriptionInterface* desc) {
  const cricket::ContentInfo* audio_content =
      GetFirstAudioContent(desc->description());
  if (audio_content) {
    if (audio_content->rejected) {
      RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
    }
    const cricket::AudioContentDescription* audio_desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
  }

  const cricket::ContentInfo* video_content =
      GetFirstVideoContent(desc->description());
  if (video_content) {
    if (video_content->rejected) {
      RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
    }
    const cricket::VideoContentDescription* video_desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateLocalTracks(video_desc->streams(), video_desc->type());
  }

  const cricket::ContentInfo* data_content =
      GetFirstDataContent(desc->description());
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (rtc::starts_with(
            data_desc->protocol().data(), cricket::kMediaProtocolRtpPrefix)) {
      UpdateLocalRtpDataChannels(data_desc->streams());
    }
  }
}

void MediaStreamSignaling::OnAudioChannelClose() {
  RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
}

void MediaStreamSignaling::OnVideoChannelClose() {
  RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
}

void MediaStreamSignaling::OnDataChannelClose() {
  // Use a temporary copy of the RTP/SCTP DataChannel list because the
  // DataChannel may callback to us and try to modify the list.
  RtpDataChannels temp_rtp_dcs;
  temp_rtp_dcs.swap(rtp_data_channels_);
  RtpDataChannels::iterator it1 = temp_rtp_dcs.begin();
  for (; it1 != temp_rtp_dcs.end(); ++it1) {
    it1->second->OnDataEngineClose();
  }

  SctpDataChannels temp_sctp_dcs;
  temp_sctp_dcs.swap(sctp_data_channels_);
  SctpDataChannels::iterator it2 = temp_sctp_dcs.begin();
  for (; it2 != temp_sctp_dcs.end(); ++it2) {
    (*it2)->OnDataEngineClose();
  }
}

void MediaStreamSignaling::UpdateRemoteStreamsList(
    const cricket::StreamParamsVec& streams,
    cricket::MediaType media_type,
    StreamCollection* new_streams) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);

  // Find removed tracks, i.e. tracks where the track id or ssrc don't match
  // the new StreamParam.
  TrackInfos::iterator track_it = current_tracks->begin();
  while (track_it != current_tracks->end()) {
    const TrackInfo& info = *track_it;
    cricket::StreamParams params;
    if (!cricket::GetStreamBySsrc(streams, info.ssrc, &params) ||
        params.id != info.track_id) {
      OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
      track_it = current_tracks->erase(track_it);
    } else {
      ++track_it;
    }
  }

  // Find new and active tracks.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // The sync_label is the MediaStream label and the |stream.id| is the
    // track id.
    const std::string& stream_label = it->sync_label;
    const std::string& track_id = it->id;
    uint32 ssrc = it->first_ssrc();

    rtc::scoped_refptr<MediaStreamInterface> stream =
        remote_streams_->find(stream_label);
    if (!stream) {
      // This is a new MediaStream. Create a new remote MediaStream.
      stream = remote_stream_factory_->CreateMediaStream(stream_label);
      remote_streams_->AddStream(stream);
      new_streams->AddStream(stream);
    }

    const TrackInfo* track_info = FindTrackInfo(*current_tracks, stream_label,
                                                track_id);
    if (!track_info) {
      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
      OnRemoteTrackSeen(stream_label, track_id, it->first_ssrc(), media_type);
    }
  }
}

void MediaStreamSignaling::OnRemoteTrackSeen(const std::string& stream_label,
                                             const std::string& track_id,
                                             uint32 ssrc,
                                             cricket::MediaType media_type) {
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track =
        remote_stream_factory_->AddAudioTrack(stream, track_id);
    stream_observer_->OnAddRemoteAudioTrack(stream, audio_track, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track =
        remote_stream_factory_->AddVideoTrack(stream, track_id);
    stream_observer_->OnAddRemoteVideoTrack(stream, video_track, ssrc);
  } else {
    ASSERT(false && "Invalid media type");
  }
}

void MediaStreamSignaling::OnRemoteTrackRemoved(
    const std::string& stream_label,
    const std::string& track_id,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    rtc::scoped_refptr<AudioTrackInterface> audio_track =
        stream->FindAudioTrack(track_id);
    if (audio_track) {
      audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      stream->RemoveTrack(audio_track);
      stream_observer_->OnRemoveRemoteAudioTrack(stream, audio_track);
    }
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    rtc::scoped_refptr<VideoTrackInterface> video_track =
        stream->FindVideoTrack(track_id);
    if (video_track) {
      video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      stream->RemoveTrack(video_track);
      stream_observer_->OnRemoveRemoteVideoTrack(stream, video_track);
    }
  } else {
    ASSERT(false && "Invalid media type");
  }
}

void MediaStreamSignaling::RejectRemoteTracks(cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);
  for (TrackInfos::iterator track_it = current_tracks->begin();
       track_it != current_tracks->end(); ++track_it) {
    const TrackInfo& info = *track_it;
    MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
      AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
      // There's no guarantee the track is still available, e.g. the track may
      // have been removed from the stream by javascript.
      if (track) {
        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      }
    }
    if (media_type == cricket::MEDIA_TYPE_VIDEO) {
      VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
      // There's no guarantee the track is still available, e.g. the track may
      // have been removed from the stream by javascript.
      if (track) {
        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      }
    }
  }
}

void MediaStreamSignaling::UpdateEndedRemoteMediaStreams() {
  std::vector<scoped_refptr<MediaStreamInterface> > streams_to_remove;
  for (size_t i = 0; i < remote_streams_->count(); ++i) {
    MediaStreamInterface* stream = remote_streams_->at(i);
    if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
      streams_to_remove.push_back(stream);
    }
  }

  std::vector<scoped_refptr<MediaStreamInterface> >::const_iterator it;
  for (it = streams_to_remove.begin(); it != streams_to_remove.end(); ++it) {
    remote_streams_->RemoveStream(*it);
    stream_observer_->OnRemoveRemoteStream(*it);
  }
}

void MediaStreamSignaling::MaybeCreateDefaultStream() {
  if (!remote_info_.IsDefaultMediaStreamNeeded())
    return;

  bool default_created = false;

  scoped_refptr<MediaStreamInterface> default_remote_stream =
      remote_streams_->find(kDefaultStreamLabel);
  if (default_remote_stream == NULL) {
    default_created = true;
    default_remote_stream =
        remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
    remote_streams_->AddStream(default_remote_stream);
  }
  if (remote_info_.default_audio_track_needed &&
      default_remote_stream->GetAudioTracks().size() == 0) {
    remote_audio_tracks_.push_back(TrackInfo(kDefaultStreamLabel,
                                             kDefaultAudioTrackLabel, 0));

    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
                      cricket::MEDIA_TYPE_AUDIO);
  }
  if (remote_info_.default_video_track_needed &&
      default_remote_stream->GetVideoTracks().size() == 0) {
    remote_video_tracks_.push_back(TrackInfo(kDefaultStreamLabel,
                                             kDefaultVideoTrackLabel, 0));
    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
                      cricket::MEDIA_TYPE_VIDEO);
  }
  if (default_created) {
    stream_observer_->OnAddRemoteStream(default_remote_stream);
  }
}

MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetRemoteTracks(
    cricket::MediaType type) {
  if (type == cricket::MEDIA_TYPE_AUDIO)
    return &remote_audio_tracks_;
  else if (type == cricket::MEDIA_TYPE_VIDEO)
    return &remote_video_tracks_;
  ASSERT(false && "Unknown MediaType");
  return NULL;
}

MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetLocalTracks(
    cricket::MediaType media_type) {
  ASSERT(media_type == cricket::MEDIA_TYPE_AUDIO ||
         media_type == cricket::MEDIA_TYPE_VIDEO);

  return (media_type == cricket::MEDIA_TYPE_AUDIO) ?
      &local_audio_tracks_ : &local_video_tracks_;
}

void MediaStreamSignaling::UpdateLocalTracks(
    const std::vector<cricket::StreamParams>& streams,
    cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetLocalTracks(media_type);

  // Find removed tracks, i.e. tracks where the track id, stream label or ssrc
  // don't match the new StreamParam.
  TrackInfos::iterator track_it = current_tracks->begin();
  while (track_it != current_tracks->end()) {
    const TrackInfo& info = *track_it;
    cricket::StreamParams params;
    if (!cricket::GetStreamBySsrc(streams, info.ssrc, &params) ||
        params.id != info.track_id || params.sync_label != info.stream_label) {
      OnLocalTrackRemoved(info.stream_label, info.track_id, info.ssrc,
                          media_type);
      track_it = current_tracks->erase(track_it);
    } else {
      ++track_it;
    }
  }

  // Find new and active tracks.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // The sync_label is the MediaStream label and the |stream.id| is the
    // track id.
    const std::string& stream_label = it->sync_label;
    const std::string& track_id = it->id;
    uint32 ssrc = it->first_ssrc();
    const TrackInfo* track_info = FindTrackInfo(*current_tracks,
                                                stream_label,
                                                track_id);
    if (!track_info) {
      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
      OnLocalTrackSeen(stream_label, track_id, it->first_ssrc(),
                       media_type);
    }
  }
}

void MediaStreamSignaling::OnLocalTrackSeen(
    const std::string& stream_label,
    const std::string& track_id,
    uint32 ssrc,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = local_streams_->find(stream_label);
  if (!stream) {
    LOG(LS_WARNING) << "An unknown local MediaStream with label "
                    << stream_label << " has been configured.";
    return;
  }

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
    if (!audio_track) {
      LOG(LS_WARNING) << "An unknown local AudioTrack with id "
                      << track_id << " has been configured.";
      return;
    }
    stream_observer_->OnAddLocalAudioTrack(stream, audio_track, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
    if (!video_track) {
      LOG(LS_WARNING) << "An unknown local VideoTrack with id "
                      << track_id << " has been configured.";
      return;
    }
    stream_observer_->OnAddLocalVideoTrack(stream, video_track, ssrc);
  } else {
    ASSERT(false && "Invalid media type");
  }
}

void MediaStreamSignaling::OnLocalTrackRemoved(
    const std::string& stream_label,
    const std::string& track_id,
    uint32 ssrc,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = local_streams_->find(stream_label);
  if (!stream) {
    // This is the normal case, i.e. RemoveLocalStream has been called and the
    // SessionDescription has been renegotiated.
    return;
  }
  // A track has been removed from the SessionDescription but the MediaStream
  // is still associated with MediaStreamSignaling. This only occurs if the SDP
  // doesn't match with the calls to AddLocalStream and RemoveLocalStream.

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
    if (!audio_track) {
      return;
    }
    stream_observer_->OnRemoveLocalAudioTrack(stream, audio_track, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
    if (!video_track) {
      return;
    }
    stream_observer_->OnRemoveLocalVideoTrack(stream, video_track);
  } else {
    ASSERT(false && "Invalid media type.");
  }
}

void MediaStreamSignaling::UpdateLocalRtpDataChannels(
    const cricket::StreamParamsVec& streams) {
  std::vector<std::string> existing_channels;

  // Find new and active data channels.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // |it->sync_label| is actually the data channel label. The reason is that
    // we use the same naming of data channels as we do for
    // MediaStreams and Tracks.
    // For MediaStreams, the sync_label is the MediaStream label and the
    // track label is the same as |streamid|.
    const std::string& channel_label = it->sync_label;
    RtpDataChannels::iterator data_channel_it =
        rtp_data_channels_.find(channel_label);
    if (!VERIFY(data_channel_it != rtp_data_channels_.end())) {
      continue;
    }
    // Set the SSRC the data channel should use for sending.
    data_channel_it->second->SetSendSsrc(it->first_ssrc());
    existing_channels.push_back(data_channel_it->first);
  }

  UpdateClosingDataChannels(existing_channels, true);
}

void MediaStreamSignaling::UpdateRemoteRtpDataChannels(
    const cricket::StreamParamsVec& streams) {
  std::vector<std::string> existing_channels;

  // Find new and active data channels.
  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
       it != streams.end(); ++it) {
    // The data channel label is either the mslabel or the SSRC if the mslabel
    // does not exist, e.g. a=ssrc:444330170 mslabel:test1.
    std::string label = it->sync_label.empty() ?
        rtc::ToString(it->first_ssrc()) : it->sync_label;
    RtpDataChannels::iterator data_channel_it =
        rtp_data_channels_.find(label);
    if (data_channel_it == rtp_data_channels_.end()) {
      // This is a new data channel.
      CreateRemoteDataChannel(label, it->first_ssrc());
    } else {
      data_channel_it->second->SetReceiveSsrc(it->first_ssrc());
    }
    existing_channels.push_back(label);
  }

  UpdateClosingDataChannels(existing_channels, false);
}

void MediaStreamSignaling::UpdateClosingDataChannels(
    const std::vector<std::string>& active_channels, bool is_local_update) {
  RtpDataChannels::iterator it = rtp_data_channels_.begin();
  while (it != rtp_data_channels_.end()) {
    DataChannel* data_channel = it->second;
    if (std::find(active_channels.begin(), active_channels.end(),
                  data_channel->label()) != active_channels.end()) {
      ++it;
      continue;
    }

    if (is_local_update)
      data_channel->SetSendSsrc(0);
    else
      data_channel->RemotePeerRequestClose();

    if (data_channel->state() == DataChannel::kClosed) {
      rtp_data_channels_.erase(it);
      it = rtp_data_channels_.begin();
    } else {
      ++it;
    }
  }
}

void MediaStreamSignaling::CreateRemoteDataChannel(const std::string& label,
                                                   uint32 remote_ssrc) {
  if (!data_channel_factory_) {
    LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
                    << "are not supported.";
    return;
  }
  scoped_refptr<DataChannel> channel(
      data_channel_factory_->CreateDataChannel(label, NULL));
  if (!channel.get()) {
    LOG(LS_WARNING) << "Remote peer requested a DataChannel but "
                    << "CreateDataChannel failed.";
    return;
  }
  channel->SetReceiveSsrc(remote_ssrc);
  stream_observer_->OnAddDataChannel(channel);
}

void MediaStreamSignaling::OnDataTransportCreatedForSctp() {
  SctpDataChannels::iterator it = sctp_data_channels_.begin();
  for (; it != sctp_data_channels_.end(); ++it) {
    (*it)->OnTransportChannelCreated();
  }
}

void MediaStreamSignaling::OnDtlsRoleReadyForSctp(rtc::SSLRole role) {
  SctpDataChannels::iterator it = sctp_data_channels_.begin();
  for (; it != sctp_data_channels_.end(); ++it) {
    if ((*it)->id() < 0) {
      int sid;
      if (!AllocateSctpSid(role, &sid)) {
        LOG(LS_ERROR) << "Failed to allocate SCTP sid.";
        continue;
      }
      (*it)->SetSctpSid(sid);
    }
  }
}

void MediaStreamSignaling::OnRemoteSctpDataChannelClosed(uint32 sid) {
  int index = FindDataChannelBySid(sid);
  if (index < 0) {
    LOG(LS_WARNING) << "Unexpected sid " << sid
                    << " of the remotely closed DataChannel.";
    return;
  }
  sctp_data_channels_[index]->Close();
}

const MediaStreamSignaling::TrackInfo*
MediaStreamSignaling::FindTrackInfo(
    const MediaStreamSignaling::TrackInfos& infos,
    const std::string& stream_label,
    const std::string track_id) const {
  for (TrackInfos::const_iterator it = infos.begin();
       it != infos.end(); ++it) {
    if (it->stream_label == stream_label && it->track_id == track_id)
      return &*it;
  }
  return NULL;
}

int MediaStreamSignaling::FindDataChannelBySid(int sid) const {
  for (size_t i = 0; i < sctp_data_channels_.size(); ++i) {
    if (sctp_data_channels_[i]->id() == sid) {
      return static_cast<int>(i);
    }
  }
  return -1;
}

}  // namespace webrtc