1 /* 2 * libjingle 3 * Copyright 2012, Google Inc. 4 * 5 * Redistribution and use in source and binary forms, with or without 6 * modification, are permitted provided that the following conditions are met: 7 * 8 * 1. Redistributions of source code must retain the above copyright notice, 9 * this list of conditions and the following disclaimer. 10 * 2. Redistributions in binary form must reproduce the above copyright notice, 11 * this list of conditions and the following disclaimer in the documentation 12 * and/or other materials provided with the distribution. 13 * 3. The name of the author may not be used to endorse or promote products 14 * derived from this software without specific prior written permission. 15 * 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED 17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO 19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

#include <string>

#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/mediastreamsignaling.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/gunit.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/stringutils.h"
#include "talk/base/thread.h"
#include "talk/p2p/base/constants.h"
#include "talk/p2p/base/sessiondescription.h"

// Stream labels and track ids shared by the reference SDP strings and the
// stream collections built by the tests below.
static const char kStreams[][8] = {"stream1", "stream2"};
static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};

using webrtc::AudioTrack;
using webrtc::AudioTrackInterface;
using webrtc::AudioTrackVector;
using webrtc::VideoTrack;
using webrtc::VideoTrackInterface;
using webrtc::VideoTrackVector;
using webrtc::DataChannelInterface;
using webrtc::FakeConstraints;
using webrtc::IceCandidateInterface;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaStreamInterface;
using webrtc::MediaStreamTrackInterface;
using webrtc::SdpParseError;
using webrtc::SessionDescriptionInterface;
using webrtc::StreamCollection;
using webrtc::StreamCollectionInterface;

// Reference SDP with a MediaStream with label "stream1", an audio track with
// id "audiotrack0" and a video track with id "videotrack0".
static const char kSdpStringWithStream1[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 mslabel:stream1\r\n"
    "a=ssrc:1 label:audiotrack0\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 mslabel:stream1\r\n"
    "a=ssrc:2 label:videotrack0\r\n";
// Reference SDP with two MediaStreams with label "stream1" and "stream2".
// Each MediaStream has one audio track and one video track.
// This uses MSID.
static const char kSdpStringWith2Stream[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS stream1 stream2\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n"
    "a=ssrc:3 cname:stream2\r\n"
    "a=ssrc:3 msid:stream2 audiotrack1\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    // VP8 uses a 90 kHz RTP clock rate, matching every other rtpmap line in
    // this file ("VP8/0" was a typo).
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n"
    "a=ssrc:4 cname:stream2\r\n"
    "a=ssrc:4 msid:stream2 videotrack1\r\n";

// Reference SDP without MediaStreams. Msid is not supported.
static const char kSdpStringWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

// Reference SDP without MediaStreams. Msid is supported.
static const char kSdpStringWithMsidWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    // SDP attribute lines use "a=", not "a:"; with the typo the
    // msid-semantic would not be recognized and this constant could not
    // serve its documented purpose of advertising msid support.
    "a=msid-semantic: WMS\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

// Reference SDP without MediaStreams and audio only.
static const char kSdpStringWithoutStreamsAudioOnly[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";

// Session-level preamble used when building SDP incrementally in
// CreateSessionDescriptionAndReference below.
static const char kSdpStringInit[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS\r\n";

// Audio m-line fragment appended when at least one audio track is wanted.
static const char kSdpStringAudio[] =
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";

// Video m-line fragment appended when at least one video track is wanted.
static const char kSdpStringVideo[] =
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

// ssrc lines binding track "audiotrack0" (ssrc 1) to stream1.
static const char kSdpStringMs1Audio0[] =
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n";

// ssrc lines binding track "videotrack0" (ssrc 2) to stream1.
static const char kSdpStringMs1Video0[] =
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n";

// ssrc lines binding track "audiotrack1" (ssrc 3) to stream1.
static const char kSdpStringMs1Audio1[] =
    "a=ssrc:3 cname:stream1\r\n"
    "a=ssrc:3 msid:stream1 audiotrack1\r\n";

// ssrc lines binding track "videotrack1" (ssrc 4) to stream1.
static const char kSdpStringMs1Video1[] =
    "a=ssrc:4 cname:stream1\r\n"
    "a=ssrc:4 msid:stream1 videotrack1\r\n";

// Verifies that |options| contain all tracks in |collection| and that
// the |options| has set the has_audio and has_video flags correctly.
static void VerifyMediaOptions(StreamCollectionInterface* collection,
                               const cricket::MediaSessionOptions& options) {
  if (!collection) {
    return;
  }

  // |options.streams| is expected to list, for each stream in |collection|
  // and in collection order, its audio tracks first and then its video
  // tracks; |stream_index| walks that flat list.
  size_t stream_index = 0;
  for (size_t i = 0; i < collection->count(); ++i) {
    MediaStreamInterface* stream = collection->at(i);
    AudioTrackVector audio_tracks = stream->GetAudioTracks();
    ASSERT_GE(options.streams.size(), stream_index + audio_tracks.size());
    for (size_t j = 0; j < audio_tracks.size(); ++j) {
      webrtc::AudioTrackInterface* audio = audio_tracks[j];
      EXPECT_EQ(options.streams[stream_index].sync_label, stream->label());
      EXPECT_EQ(options.streams[stream_index++].id, audio->id());
      EXPECT_TRUE(options.has_audio);
    }
    VideoTrackVector video_tracks = stream->GetVideoTracks();
    ASSERT_GE(options.streams.size(), stream_index + video_tracks.size());
    for (size_t j = 0; j < video_tracks.size(); ++j) {
      webrtc::VideoTrackInterface* video = video_tracks[j];
      EXPECT_EQ(options.streams[stream_index].sync_label, stream->label());
      EXPECT_EQ(options.streams[stream_index++].id, video->id());
      EXPECT_TRUE(options.has_video);
    }
  }
}

// Returns true if |s1| and |s2| contain the same streams (by label, in the
// same order) with the same audio and video track ids. NULL collections
// compare unequal.
static bool CompareStreamCollections(StreamCollectionInterface* s1,
                                     StreamCollectionInterface* s2) {
  if (s1 == NULL || s2 == NULL || s1->count() != s2->count())
    return false;

  for (size_t i = 0; i != s1->count(); ++i) {
    if (s1->at(i)->label() != s2->at(i)->label())
      return false;
    webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks();
    webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks();
    webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks();
    webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks();

    if (audio_tracks1.size() != audio_tracks2.size())
      return false;
    for (size_t j = 0; j != audio_tracks1.size(); ++j) {
      if (audio_tracks1[j]->id() != audio_tracks2[j]->id())
        return false;
    }
    if (video_tracks1.size() != video_tracks2.size())
      return false;
    for (size_t j = 0; j != video_tracks1.size(); ++j) {
      if (video_tracks1[j]->id() != video_tracks2[j]->id())
        return false;
    }
  }
  return true;
}

// MediaStreamSignalingObserver implementation that records the remote
// streams and the local/remote tracks it is notified about, so tests can
// verify which notifications were fired and with what arguments.
class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
 public:
  MockSignalingObserver()
      : remote_media_streams_(StreamCollection::Create()) {
  }

  virtual ~MockSignalingObserver() {
  }

  // A new remote stream has been discovered.
  virtual void OnAddRemoteStream(MediaStreamInterface* remote_stream) {
    remote_media_streams_->AddStream(remote_stream);
  }

  // Remote stream is no longer available.
  virtual void OnRemoveRemoteStream(MediaStreamInterface* remote_stream) {
    remote_media_streams_->RemoveStream(remote_stream);
  }

  // Intentionally a no-op; data channels are not exercised in these tests.
  virtual void OnAddDataChannel(DataChannelInterface* data_channel) {
  }

  virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream,
                                    AudioTrackInterface* audio_track,
                                    uint32 ssrc) {
    AddTrack(&local_audio_tracks_, stream, audio_track, ssrc);
  }

  virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream,
                                    VideoTrackInterface* video_track,
                                    uint32 ssrc) {
    AddTrack(&local_video_tracks_, stream, video_track, ssrc);
  }

  virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
                                       AudioTrackInterface* audio_track) {
    RemoveTrack(&local_audio_tracks_, stream, audio_track);
  }

  virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
                                       VideoTrackInterface* video_track) {
    RemoveTrack(&local_video_tracks_, stream, video_track);
  }

  virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
                                     AudioTrackInterface* audio_track,
                                     uint32 ssrc) {
    AddTrack(&remote_audio_tracks_, stream, audio_track, ssrc);
  }

  virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
                                     VideoTrackInterface*
                                         video_track,
                                     uint32 ssrc) {
    AddTrack(&remote_video_tracks_, stream, video_track, ssrc);
  }

  virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
                                        AudioTrackInterface* audio_track) {
    RemoveTrack(&remote_audio_tracks_, stream, audio_track);
  }

  virtual void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream,
                                        VideoTrackInterface* video_track) {
    RemoveTrack(&remote_video_tracks_, stream, video_track);
  }

  // Intentionally a no-op; local stream removal is not verified here.
  virtual void OnRemoveLocalStream(MediaStreamInterface* stream) {
  }

  // Returns the remote stream with |label|, or NULL if not found.
  MediaStreamInterface* RemoteStream(const std::string& label) {
    return remote_media_streams_->find(label);
  }

  // All remote streams announced so far via OnAddRemoteStream.
  StreamCollectionInterface* remote_streams() const {
    return remote_media_streams_;
  }

  size_t NumberOfRemoteAudioTracks() { return remote_audio_tracks_.size(); }

  // Fails the current test unless a remote audio track |track_id| was
  // announced for |stream_label| with |ssrc|.
  void VerifyRemoteAudioTrack(const std::string& stream_label,
                              const std::string& track_id,
                              uint32 ssrc) {
    VerifyTrack(remote_audio_tracks_, stream_label, track_id, ssrc);
  }

  size_t NumberOfRemoteVideoTracks() { return remote_video_tracks_.size(); }

  void VerifyRemoteVideoTrack(const std::string& stream_label,
                              const std::string& track_id,
                              uint32 ssrc) {
    VerifyTrack(remote_video_tracks_, stream_label, track_id, ssrc);
  }

  size_t NumberOfLocalAudioTracks() { return local_audio_tracks_.size(); }
  void VerifyLocalAudioTrack(const std::string& stream_label,
                             const std::string& track_id,
                             uint32 ssrc) {
    VerifyTrack(local_audio_tracks_, stream_label, track_id, ssrc);
  }

  size_t NumberOfLocalVideoTracks() { return local_video_tracks_.size(); }

  void VerifyLocalVideoTrack(const std::string& stream_label,
                             const std::string& track_id,
                             uint32 ssrc) {
    VerifyTrack(local_video_tracks_, stream_label, track_id, ssrc);
  }

 private:
  // Bookkeeping for one observed track: which stream announced it and with
  // which ssrc.
  struct TrackInfo {
    TrackInfo() {}
    TrackInfo(const std::string& stream_label, const std::string
track_id, 349 uint32 ssrc) 350 : stream_label(stream_label), 351 track_id(track_id), 352 ssrc(ssrc) { 353 } 354 std::string stream_label; 355 std::string track_id; 356 uint32 ssrc; 357 }; 358 typedef std::map<std::string, TrackInfo> TrackInfos; 359 360 void AddTrack(TrackInfos* track_infos, MediaStreamInterface* stream, 361 MediaStreamTrackInterface* track, 362 uint32 ssrc) { 363 (*track_infos)[track->id()] = TrackInfo(stream->label(), track->id(), 364 ssrc); 365 } 366 367 void RemoveTrack(TrackInfos* track_infos, MediaStreamInterface* stream, 368 MediaStreamTrackInterface* track) { 369 TrackInfos::iterator it = track_infos->find(track->id()); 370 ASSERT_TRUE(it != track_infos->end()); 371 ASSERT_EQ(it->second.stream_label, stream->label()); 372 track_infos->erase(it); 373 } 374 375 void VerifyTrack(const TrackInfos& track_infos, 376 const std::string& stream_label, 377 const std::string& track_id, 378 uint32 ssrc) { 379 TrackInfos::const_iterator it = track_infos.find(track_id); 380 ASSERT_TRUE(it != track_infos.end()); 381 EXPECT_EQ(stream_label, it->second.stream_label); 382 EXPECT_EQ(ssrc, it->second.ssrc); 383 } 384 385 TrackInfos remote_audio_tracks_; 386 TrackInfos remote_video_tracks_; 387 TrackInfos local_audio_tracks_; 388 TrackInfos local_video_tracks_; 389 390 talk_base::scoped_refptr<StreamCollection> remote_media_streams_; 391 }; 392 393 class MediaStreamSignalingForTest : public webrtc::MediaStreamSignaling { 394 public: 395 explicit MediaStreamSignalingForTest(MockSignalingObserver* observer) 396 : webrtc::MediaStreamSignaling(talk_base::Thread::Current(), observer) { 397 }; 398 399 using webrtc::MediaStreamSignaling::GetOptionsForOffer; 400 using webrtc::MediaStreamSignaling::GetOptionsForAnswer; 401 using webrtc::MediaStreamSignaling::OnRemoteDescriptionChanged; 402 using webrtc::MediaStreamSignaling::remote_streams; 403 }; 404 405 class MediaStreamSignalingTest: public testing::Test { 406 protected: 407 virtual void SetUp() { 408 
    observer_.reset(new MockSignalingObserver());
    signaling_.reset(new MediaStreamSignalingForTest(observer_.get()));
  }

  // Create a collection of |number_of_streams| streams, each with one audio
  // and one video track.
  // CreateStreamCollection(1) creates a collection that
  // corresponds to kSdpStringWithStream1.
  // CreateStreamCollection(2) corresponds to kSdpStringWith2Stream.
  talk_base::scoped_refptr<StreamCollection>
      CreateStreamCollection(int number_of_streams) {
    talk_base::scoped_refptr<StreamCollection> local_collection(
        StreamCollection::Create());

    for (int i = 0; i < number_of_streams; ++i) {
      talk_base::scoped_refptr<webrtc::MediaStreamInterface> stream(
          webrtc::MediaStream::Create(kStreams[i]));

      // Add a local audio track.
      talk_base::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
          webrtc::AudioTrack::Create(kAudioTracks[i], NULL));
      stream->AddTrack(audio_track);

      // Add a local video track.
      talk_base::scoped_refptr<webrtc::VideoTrackInterface> video_track(
          webrtc::VideoTrack::Create(kVideoTracks[i], NULL));
      stream->AddTrack(video_track);

      local_collection->AddStream(stream);
    }
    return local_collection;
  }

  // This function creates a MediaStream with label kStreams[0] and
  // |number_of_audio_tracks| and |number_of_video_tracks| tracks and the
  // corresponding SessionDescriptionInterface. The SessionDescriptionInterface
  // is returned in |desc| and the MediaStream is stored in
  // |reference_collection_|.
  void CreateSessionDescriptionAndReference(
      size_t number_of_audio_tracks,
      size_t number_of_video_tracks,
      SessionDescriptionInterface** desc) {
    ASSERT_TRUE(desc != NULL);
    ASSERT_LE(number_of_audio_tracks, 2u);
    ASSERT_LE(number_of_video_tracks, 2u);

    reference_collection_ = StreamCollection::Create();
    std::string sdp_ms1 = std::string(kSdpStringInit);

    std::string mediastream_label = kStreams[0];

    talk_base::scoped_refptr<webrtc::MediaStreamInterface> stream(
        webrtc::MediaStream::Create(mediastream_label));
    reference_collection_->AddStream(stream);

    // Build the SDP string and the reference stream in lockstep so they
    // always describe the same set of tracks.
    if (number_of_audio_tracks > 0) {
      sdp_ms1 += std::string(kSdpStringAudio);
      sdp_ms1 += std::string(kSdpStringMs1Audio0);
      AddAudioTrack(kAudioTracks[0], stream);
    }
    if (number_of_audio_tracks > 1) {
      sdp_ms1 += kSdpStringMs1Audio1;
      AddAudioTrack(kAudioTracks[1], stream);
    }

    if (number_of_video_tracks > 0) {
      sdp_ms1 += std::string(kSdpStringVideo);
      sdp_ms1 += std::string(kSdpStringMs1Video0);
      AddVideoTrack(kVideoTracks[0], stream);
    }
    if (number_of_video_tracks > 1) {
      sdp_ms1 += kSdpStringMs1Video1;
      AddVideoTrack(kVideoTracks[1], stream);
    }

    *desc = webrtc::CreateSessionDescription(
        SessionDescriptionInterface::kOffer, sdp_ms1, NULL);
  }

  // Creates an audio track with |track_id| and adds it to |stream|.
  void AddAudioTrack(const std::string& track_id,
                     MediaStreamInterface* stream) {
    talk_base::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
        webrtc::AudioTrack::Create(track_id, NULL));
    ASSERT_TRUE(stream->AddTrack(audio_track));
  }

  // Creates a video track with |track_id| and adds it to |stream|.
  void AddVideoTrack(const std::string& track_id,
                     MediaStreamInterface* stream) {
    talk_base::scoped_refptr<webrtc::VideoTrackInterface> video_track(
        webrtc::VideoTrack::Create(track_id, NULL));
    ASSERT_TRUE(stream->AddTrack(video_track));
  }

  // Expected streams for the description most recently built by
  // CreateSessionDescriptionAndReference.
  talk_base::scoped_refptr<StreamCollection> reference_collection_;
  talk_base::scoped_ptr<MockSignalingObserver> observer_;
  talk_base::scoped_ptr<MediaStreamSignalingForTest> signaling_;
};

// Test that a MediaSessionOptions is created for an offer if
// kOfferToReceiveAudio and kOfferToReceiveVideo constraints are set but no
// MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudioVideo) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(true);
  constraints.SetMandatoryReceiveVideo(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_TRUE(options.has_audio);
  EXPECT_TRUE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}

// Test that a correct MediaSessionOptions is created for an offer if
// kOfferToReceiveAudio constraints is set but no MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudio) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_TRUE(options.has_audio);
  EXPECT_FALSE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}

// Test that a correct MediaSessionOptions is created for an offer if
// no constraints or MediaStreams are sent. Audio is offered by default.
TEST_F(MediaStreamSignalingTest, GetDefaultMediaSessionOptionsForOffer) {
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options));
  EXPECT_TRUE(options.has_audio);
  EXPECT_FALSE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}

// Test that a correct MediaSessionOptions is created for an offer if
// kOfferToReceiveVideo constraints is set but no MediaStreams are sent.
543 TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithVideo) { 544 FakeConstraints constraints; 545 constraints.SetMandatoryReceiveAudio(false); 546 constraints.SetMandatoryReceiveVideo(true); 547 cricket::MediaSessionOptions options; 548 EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options)); 549 EXPECT_FALSE(options.has_audio); 550 EXPECT_TRUE(options.has_video); 551 EXPECT_TRUE(options.bundle_enabled); 552 } 553 554 // Test that a correct MediaSessionOptions is created for an offer if 555 // kUseRtpMux constraints is set to false. 556 TEST_F(MediaStreamSignalingTest, 557 GetMediaSessionOptionsForOfferWithBundleDisabled) { 558 FakeConstraints constraints; 559 constraints.SetMandatoryReceiveAudio(true); 560 constraints.SetMandatoryReceiveVideo(true); 561 constraints.SetMandatoryUseRtpMux(false); 562 cricket::MediaSessionOptions options; 563 EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options)); 564 EXPECT_TRUE(options.has_audio); 565 EXPECT_TRUE(options.has_video); 566 EXPECT_FALSE(options.bundle_enabled); 567 } 568 569 // Test that a correct MediaSessionOptions is created to restart ice if 570 // kIceRestart constraints is set. It also tests that subsequent 571 // MediaSessionOptions don't have |transport_options.ice_restart| set. 572 TEST_F(MediaStreamSignalingTest, 573 GetMediaSessionOptionsForOfferWithIceRestart) { 574 FakeConstraints constraints; 575 constraints.SetMandatoryIceRestart(true); 576 cricket::MediaSessionOptions options; 577 EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options)); 578 EXPECT_TRUE(options.transport_options.ice_restart); 579 580 EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options)); 581 EXPECT_FALSE(options.transport_options.ice_restart); 582 } 583 584 // Test that GetMediaSessionOptionsForOffer and GetOptionsForAnswer work as 585 // expected if unknown constraints are used. 
586 TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsWithBadConstraints) { 587 FakeConstraints mandatory; 588 mandatory.AddMandatory("bad_key", "bad_value"); 589 cricket::MediaSessionOptions options; 590 EXPECT_FALSE(signaling_->GetOptionsForOffer(&mandatory, &options)); 591 EXPECT_FALSE(signaling_->GetOptionsForAnswer(&mandatory, &options)); 592 593 FakeConstraints optional; 594 optional.AddOptional("bad_key", "bad_value"); 595 EXPECT_TRUE(signaling_->GetOptionsForOffer(&optional, &options)); 596 EXPECT_TRUE(signaling_->GetOptionsForAnswer(&optional, &options)); 597 } 598 599 // Test that a correct MediaSessionOptions are created for an offer if 600 // a MediaStream is sent and later updated with a new track. 601 // MediaConstraints are not used. 602 TEST_F(MediaStreamSignalingTest, AddTrackToLocalMediaStream) { 603 talk_base::scoped_refptr<StreamCollection> local_streams( 604 CreateStreamCollection(1)); 605 MediaStreamInterface* local_stream = local_streams->at(0); 606 EXPECT_TRUE(signaling_->AddLocalStream(local_stream)); 607 cricket::MediaSessionOptions options; 608 EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options)); 609 VerifyMediaOptions(local_streams, options); 610 611 cricket::MediaSessionOptions updated_options; 612 local_stream->AddTrack(AudioTrack::Create(kAudioTracks[1], NULL)); 613 EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options)); 614 VerifyMediaOptions(local_streams, options); 615 } 616 617 // Test that the MediaConstraints in an answer don't affect if audio and video 618 // is offered in an offer but that if kOfferToReceiveAudio or 619 // kOfferToReceiveVideo constraints are true in an offer, the media type will be 620 // included in subsequent answers. 
TEST_F(MediaStreamSignalingTest, MediaConstraintsInAnswer) {
  FakeConstraints answer_c;
  answer_c.SetMandatoryReceiveAudio(true);
  answer_c.SetMandatoryReceiveVideo(true);

  cricket::MediaSessionOptions answer_options;
  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&answer_c, &answer_options));
  EXPECT_TRUE(answer_options.has_audio);
  EXPECT_TRUE(answer_options.has_video);

  // The constraints used for the answer above must not leak into a later
  // offer: explicitly request neither audio nor video here.
  FakeConstraints offer_c;
  offer_c.SetMandatoryReceiveAudio(false);
  offer_c.SetMandatoryReceiveVideo(false);

  cricket::MediaSessionOptions offer_options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&offer_c, &offer_options));
  EXPECT_FALSE(offer_options.has_audio);
  EXPECT_FALSE(offer_options.has_video);

  FakeConstraints updated_offer_c;
  updated_offer_c.SetMandatoryReceiveAudio(true);
  updated_offer_c.SetMandatoryReceiveVideo(true);

  cricket::MediaSessionOptions updated_offer_options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&updated_offer_c,
                                             &updated_offer_options));
  EXPECT_TRUE(updated_offer_options.has_audio);
  EXPECT_TRUE(updated_offer_options.has_video);

  // Since an offer has been created with both audio and video, subsequent
  // offers and answers should contain both audio and video.
  // Answers will only contain the media types that exist in the offer
  // regardless of the value of |updated_answer_options.has_audio| and
  // |updated_answer_options.has_video|.
655 FakeConstraints updated_answer_c; 656 answer_c.SetMandatoryReceiveAudio(false); 657 answer_c.SetMandatoryReceiveVideo(false); 658 659 cricket::MediaSessionOptions updated_answer_options; 660 EXPECT_TRUE(signaling_->GetOptionsForAnswer(&updated_answer_c, 661 &updated_answer_options)); 662 EXPECT_TRUE(updated_answer_options.has_audio); 663 EXPECT_TRUE(updated_answer_options.has_video); 664 665 EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, 666 &updated_offer_options)); 667 EXPECT_TRUE(updated_offer_options.has_audio); 668 EXPECT_TRUE(updated_offer_options.has_video); 669 } 670 671 // This test verifies that the remote MediaStreams corresponding to a received 672 // SDP string is created. In this test the two separate MediaStreams are 673 // signaled. 674 TEST_F(MediaStreamSignalingTest, UpdateRemoteStreams) { 675 talk_base::scoped_ptr<SessionDescriptionInterface> desc( 676 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, 677 kSdpStringWithStream1, NULL)); 678 EXPECT_TRUE(desc != NULL); 679 signaling_->OnRemoteDescriptionChanged(desc.get()); 680 681 talk_base::scoped_refptr<StreamCollection> reference( 682 CreateStreamCollection(1)); 683 EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), 684 reference.get())); 685 EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), 686 reference.get())); 687 EXPECT_EQ(1u, observer_->NumberOfRemoteAudioTracks()); 688 observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1); 689 EXPECT_EQ(1u, observer_->NumberOfRemoteVideoTracks()); 690 observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2); 691 692 // Create a session description based on another SDP with another 693 // MediaStream. 
  talk_base::scoped_ptr<SessionDescriptionInterface> update_desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWith2Stream, NULL));
  EXPECT_TRUE(update_desc != NULL);
  signaling_->OnRemoteDescriptionChanged(update_desc.get());

  // Both streams, with all four tracks and their ssrcs, should now be known.
  talk_base::scoped_refptr<StreamCollection> reference2(
      CreateStreamCollection(2));
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference2.get()));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference2.get()));

  EXPECT_EQ(2u, observer_->NumberOfRemoteAudioTracks());
  observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyRemoteAudioTrack(kStreams[1], kAudioTracks[1], 3);
  EXPECT_EQ(2u, observer_->NumberOfRemoteVideoTracks());
  observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2);
  observer_->VerifyRemoteVideoTrack(kStreams[1], kVideoTracks[1], 4);
}

// This test verifies that the remote MediaStreams corresponding to a received
// SDP string is created. In this test the same remote MediaStream is signaled
// but MediaStream tracks are added and removed.
TEST_F(MediaStreamSignalingTest, AddRemoveTrackFromExistingRemoteMediaStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms1;
  CreateSessionDescriptionAndReference(1, 1, desc_ms1.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms1.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));

  // Add extra audio and video tracks to the same MediaStream.
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks;
  CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms1_two_tracks.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference_collection_));

  // Remove the extra audio and video tracks again.
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms2;
  CreateSessionDescriptionAndReference(1, 1, desc_ms2.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms2.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference_collection_));
}

// This test verifies that remote tracks are ended if a
// local session description is set that rejects the media content type.
TEST_F(MediaStreamSignalingTest, RejectMediaContent) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithStream1, NULL));
  EXPECT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());

  // Both remote tracks start out live.
  talk_base::scoped_refptr<webrtc::VideoTrackInterface> remote_video =
      remote_stream->GetVideoTracks()[0];
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state());
  talk_base::scoped_refptr<webrtc::AudioTrackInterface> remote_audio =
      remote_stream->GetAudioTracks()[0];
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());

  // Rejecting the video content in the local description must end only the
  // remote video track.
  cricket::ContentInfo* video_info =
      desc->description()->GetContentByName("video");
  ASSERT_TRUE(video_info != NULL);
  video_info->rejected = true;
  signaling_->OnLocalDescriptionChanged(desc.get());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());

  // Rejecting the audio content as well must end the remote audio track too.
  cricket::ContentInfo* audio_info =
      desc->description()->GetContentByName("audio");
  ASSERT_TRUE(audio_info != NULL);
  audio_info->rejected = true;
  signaling_->OnLocalDescriptionChanged(desc.get());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state());
}

// This tests that a default MediaStream is created if a remote session
// description doesn't contain any streams and no MSID support.
// It also tests that the default stream is updated if a video m-line is added
// in a subsequent session description.
TEST_F(MediaStreamSignalingTest, SdpWithoutMsidCreatesDefaultStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_audio_only(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreamsAudioOnly,
                                       NULL));
  ASSERT_TRUE(desc_audio_only != NULL);
  signaling_->OnRemoteDescriptionChanged(desc_audio_only.get());

  EXPECT_EQ(1u, signaling_->remote_streams()->count());
  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);

  // Only a default audio track exists until a video m-line is added.
  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
  EXPECT_EQ(0u, remote_stream->GetVideoTracks().size());
  EXPECT_EQ("default", remote_stream->label());

  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams, NULL));
  ASSERT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());
  EXPECT_EQ(1u, signaling_->remote_streams()->count());
  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
  EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id());
  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
  EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id());
  // Default tracks are reported with ssrc 0.
  observer_->VerifyRemoteAudioTrack("default", "defaulta0", 0);
  observer_->VerifyRemoteVideoTrack("default", "defaultv0", 0);
}

// This tests that a default MediaStream is created if the remote session
// description doesn't contain any streams and doesn't contain an indication
// of whether MSID is supported.
818 TEST_F(MediaStreamSignalingTest, 819 SdpWithoutMsidAndStreamsCreatesDefaultStream) { 820 talk_base::scoped_ptr<SessionDescriptionInterface> desc( 821 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, 822 kSdpStringWithoutStreams, 823 NULL)); 824 ASSERT_TRUE(desc != NULL); 825 signaling_->OnRemoteDescriptionChanged(desc.get()); 826 827 ASSERT_EQ(1u, observer_->remote_streams()->count()); 828 MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); 829 EXPECT_EQ(1u, remote_stream->GetAudioTracks().size()); 830 EXPECT_EQ(1u, remote_stream->GetVideoTracks().size()); 831 } 832 833 // This tests that a default MediaStream is not created if the remote session 834 // description doesn't contain any streams but does support MSID. 835 TEST_F(MediaStreamSignalingTest, SdpWitMsidDontCreatesDefaultStream) { 836 talk_base::scoped_ptr<SessionDescriptionInterface> desc_msid_without_streams( 837 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, 838 kSdpStringWithMsidWithoutStreams, 839 NULL)); 840 signaling_->OnRemoteDescriptionChanged(desc_msid_without_streams.get()); 841 EXPECT_EQ(0u, observer_->remote_streams()->count()); 842 } 843 844 // This test that a default MediaStream is not created if a remote session 845 // description is updated to not have any MediaStreams. 
846 TEST_F(MediaStreamSignalingTest, VerifyDefaultStreamIsNotCreated) { 847 talk_base::scoped_ptr<SessionDescriptionInterface> desc( 848 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, 849 kSdpStringWithStream1, 850 NULL)); 851 ASSERT_TRUE(desc != NULL); 852 signaling_->OnRemoteDescriptionChanged(desc.get()); 853 talk_base::scoped_refptr<StreamCollection> reference( 854 CreateStreamCollection(1)); 855 EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), 856 reference.get())); 857 858 talk_base::scoped_ptr<SessionDescriptionInterface> desc_without_streams( 859 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, 860 kSdpStringWithoutStreams, 861 NULL)); 862 signaling_->OnRemoteDescriptionChanged(desc_without_streams.get()); 863 EXPECT_EQ(0u, observer_->remote_streams()->count()); 864 } 865 866 // This test that the correct MediaStreamSignalingObserver methods are called 867 // when MediaStreamSignaling::OnLocalDescriptionChanged is called with an 868 // updated local session description. 869 TEST_F(MediaStreamSignalingTest, LocalDescriptionChanged) { 870 talk_base::scoped_ptr<SessionDescriptionInterface> desc_1; 871 CreateSessionDescriptionAndReference(2, 2, desc_1.use()); 872 873 signaling_->AddLocalStream(reference_collection_->at(0)); 874 signaling_->OnLocalDescriptionChanged(desc_1.get()); 875 EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks()); 876 EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks()); 877 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); 878 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); 879 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3); 880 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4); 881 882 // Remove an audio and video track. 
883 talk_base::scoped_ptr<SessionDescriptionInterface> desc_2; 884 CreateSessionDescriptionAndReference(1, 1, desc_2.use()); 885 signaling_->OnLocalDescriptionChanged(desc_2.get()); 886 EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); 887 EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); 888 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); 889 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); 890 } 891 892 // This test that the correct MediaStreamSignalingObserver methods are called 893 // when MediaStreamSignaling::AddLocalStream is called after 894 // MediaStreamSignaling::OnLocalDescriptionChanged is called. 895 TEST_F(MediaStreamSignalingTest, AddLocalStreamAfterLocalDescriptionChanged) { 896 talk_base::scoped_ptr<SessionDescriptionInterface> desc_1; 897 CreateSessionDescriptionAndReference(2, 2, desc_1.use()); 898 899 signaling_->OnLocalDescriptionChanged(desc_1.get()); 900 EXPECT_EQ(0u, observer_->NumberOfLocalAudioTracks()); 901 EXPECT_EQ(0u, observer_->NumberOfLocalVideoTracks()); 902 903 signaling_->AddLocalStream(reference_collection_->at(0)); 904 EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks()); 905 EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks()); 906 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); 907 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); 908 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3); 909 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4); 910 } 911 912 // This test that the correct MediaStreamSignalingObserver methods are called 913 // if the ssrc on a local track is changed when 914 // MediaStreamSignaling::OnLocalDescriptionChanged is called. 
915 TEST_F(MediaStreamSignalingTest, ChangeSsrcOnTrackInLocalSessionDescription) { 916 talk_base::scoped_ptr<SessionDescriptionInterface> desc; 917 CreateSessionDescriptionAndReference(1, 1, desc.use()); 918 919 signaling_->AddLocalStream(reference_collection_->at(0)); 920 signaling_->OnLocalDescriptionChanged(desc.get()); 921 EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); 922 EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); 923 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); 924 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); 925 926 // Change the ssrc of the audio and video track. 927 std::string sdp; 928 desc->ToString(&sdp); 929 std::string ssrc_org = "a=ssrc:1"; 930 std::string ssrc_to = "a=ssrc:97"; 931 talk_base::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), 932 ssrc_to.c_str(), ssrc_to.length(), 933 &sdp); 934 ssrc_org = "a=ssrc:2"; 935 ssrc_to = "a=ssrc:98"; 936 talk_base::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), 937 ssrc_to.c_str(), ssrc_to.length(), 938 &sdp); 939 talk_base::scoped_ptr<SessionDescriptionInterface> updated_desc( 940 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, 941 sdp, NULL)); 942 943 signaling_->OnLocalDescriptionChanged(updated_desc.get()); 944 EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); 945 EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); 946 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 97); 947 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 98); 948 } 949 950 951