<html>
<head>
  <script type="text/javascript" src="webrtc_test_utilities.js"></script>
  <script type="text/javascript" src="webrtc_test_audio.js"></script>
  <script type="text/javascript">
  $ = function(id) {
    return document.getElementById(id);
  };

  var gFirstConnection = null;
  var gSecondConnection = null;
  var gTestWithoutMsid = false;
  var gLocalStream = null;
  var gSentTones = '';

  var gRemoteStreams = {};

  // Default transform functions, overridden by some test cases.
  var transformSdp = function(sdp) { return sdp; };
  var transformRemoteSdp = function(sdp) { return sdp; };
  var transformCandidate = function(candidate) { return candidate; };
  var onLocalDescriptionError = function(error) { };

  // When using external SDES, the crypto key is chosen by javascript.
  var EXTERNAL_SDES_LINES = {
    'audio': 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 ' +
        'inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR',
    'video': 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 ' +
        'inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj',
    'data': 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 ' +
        'inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj'
  };

  // When using GICE, the ICE credentials can be chosen by javascript.
  var EXTERNAL_GICE_UFRAG = '1234567890123456';
  var EXTERNAL_GICE_PWD = '123456789012345678901234';

  setAllEventsOccuredHandler(function() {
    document.title = 'OK';
  });

  // Test that we can set up a call with an audio and a video track.
  function call(constraints) {
    createConnections(null);
    navigator.webkitGetUserMedia(constraints,
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // First calls without streams on any connection, and then adds a stream
  // to peer connection 1, which gets sent to peer connection 2. We must wait
  // for the first negotiation to complete before starting the second one,
  // which is why we wait until the connection is stable before
  // re-negotiating.
  function callEmptyThenAddOneStreamAndRenegotiate(constraints) {
    createConnections(null);
    negotiate();
    waitForConnectionToStabilize(gFirstConnection);
    navigator.webkitGetUserMedia(constraints,
        addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
    // Only the first connection is sending here.
    waitForVideo('remote-view-2');
  }
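  // The call* and negotiate* functions in this file are entry points; the
  // page does not invoke them itself. A hypothetical driver (for example a
  // browser test that executes script in this page and polls document.title
  // for 'OK') might start the basic test like this:
  //
  //   call({audio: true, video: true});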
  // First makes a call between pc1 and pc2, and then makes a call between
  // pc3 and pc4 where the remote streams from pc1 and pc2 will be used as
  // the local streams of pc3 and pc4.
  function callAndForwardRemoteStream(constraints) {
    createConnections(null);
    navigator.webkitGetUserMedia(constraints,
        addStreamToBothConnectionsAndNegotiate,
        printGetUserMediaError);
    var gotRemoteStream1 = false;
    var gotRemoteStream2 = false;

    var onRemoteStream1 = function() {
      gotRemoteStream1 = true;
      maybeCallEstablished();
    }

    var onRemoteStream2 = function() {
      gotRemoteStream2 = true;
      maybeCallEstablished();
    }

    var maybeCallEstablished = function() {
      if (gotRemoteStream1 && gotRemoteStream2) {
        onCallEstablished();
      }
    }

    var onCallEstablished = function() {
      thirdConnection = createConnection(null, 'remote-view-3');
      thirdConnection.addStream(gRemoteStreams['remote-view-1']);

      fourthConnection = createConnection(null, 'remote-view-4');
      fourthConnection.addStream(gRemoteStreams['remote-view-2']);

      negotiateBetween(thirdConnection, fourthConnection);

      waitForVideo('remote-view-3');
      waitForVideo('remote-view-4');
    }

    // Do the forwarding after we have received video.
    detectVideoPlaying('remote-view-1', onRemoteStream1);
    detectVideoPlaying('remote-view-2', onRemoteStream2);
  }

  // Test that we can set up a call with an audio and a video track while
  // simulating that the remote peer doesn't support MSID.
  function callWithoutMsidAndBundle() {
    createConnections(null);
    transformSdp = removeBundle;
    transformRemoteSdp = removeMsid;
    gTestWithoutMsid = true;
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test that we can't set up a call with an unsupported video codec.
  function negotiateUnsupportedVideoCodec() {
    createConnections(null);
    transformSdp = removeVideoCodec;
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    onLocalDescriptionError = function(error) {
      var expectedMsg = 'SetLocalDescription failed: Failed to' +
          ' update session state: ERROR_CONTENT';
      expectEquals(expectedMsg, error);

      // Got the right message, test succeeded.
      document.title = 'OK';
    };
  }

  // Test that we can't set up a call if one peer does not support encryption.
  function negotiateNonCryptoCall() {
    createConnections(null);
    transformSdp = removeCrypto;
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    onLocalDescriptionError = function(error) {
      var expectedMsg = 'SetLocalDescription failed: Called with a SDP without'
          + ' crypto enabled.';
      expectEquals(expectedMsg, error);

      // Got the right message, test succeeded.
      document.title = 'OK';
    };
  }

  // Test that we can set up a call with legacy settings.
  function callWithLegacySdp() {
    transformSdp = function(sdp) {
      return removeBundle(useGice(useExternalSdes(sdp)));
    };
    transformCandidate = addGiceCredsToCandidate;
    createConnections({
      'mandatory': {'RtpDataChannels': true, 'DtlsSrtpKeyAgreement': false}
    });
    setupDataChannel({reliable: false});
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }
  // Test only a data channel.
  function callWithDataOnly() {
    createConnections({optional: [{RtpDataChannels: true}]});
    setupDataChannel({reliable: false});
    negotiate();
  }

  function callWithSctpDataOnly() {
    createConnections({optional: [{DtlsSrtpKeyAgreement: true}]});
    setupSctpDataChannel({reliable: true});
    negotiate();
  }

  // Test a call with audio, video and a data channel.
  function callWithDataAndMedia() {
    createConnections({optional: [{RtpDataChannels: true}]});
    setupDataChannel({reliable: false});
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate,
        printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  function callWithSctpDataAndMedia() {
    createConnections({optional: [{DtlsSrtpKeyAgreement: true}]});
    setupSctpDataChannel({reliable: true});
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate,
        printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }


  // Test a call with a data channel, then later add audio and video.
  function callWithDataAndLaterAddMedia() {
    createConnections({optional: [{RtpDataChannels: true}]});
    setupDataChannel({reliable: false});
    negotiate();

    // Set an event handler for when the data channel has been closed.
    setAllEventsOccuredHandler(function() {
      // When the video is flowing the test is done.
      setAllEventsOccuredHandler(function() {
        document.title = 'OK';
      });
      navigator.webkitGetUserMedia({audio: true, video: true},
          addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
      waitForVideo('remote-view-1');
      waitForVideo('remote-view-2');
    });
  }

  // Test that we can set up a call and send DTMF.
  function callAndSendDtmf(tones) {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
    var onCallEstablished = function() {
      // Send DTMF tones.
      var localAudioTrack = gLocalStream.getAudioTracks()[0];
      var dtmfSender = gFirstConnection.createDTMFSender(localAudioTrack);
      dtmfSender.ontonechange = onToneChange;
      dtmfSender.insertDTMF(tones);
      // Wait for the DTMF tones callback.
      document.title = 'Waiting for dtmf...';
      addExpectedEvent();
      var waitDtmf = setInterval(function() {
        if (gSentTones == tones) {
          clearInterval(waitDtmf);
          eventOccured();
        }
      }, 100);
    }

    // Do the DTMF test after we have received video.
    detectVideoPlaying('remote-view-2', onCallEstablished);
  }
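  // For illustration only (the exact payload list depends on the browser
  // build): forceIsac16k_ below rewrites an offer line such as
  //   m=audio 1 RTP/SAVPF 111 103 104 0 8 106 105 13 126
  // into
  //   m=audio 1 RTP/SAVPF 103
  // and then drops the rtpmap lines of the removed audio codecs (keeping
  // VP8, red and ulpfec), so that only payload type 103 (ISAC/16000 in
  // Chrome) remains negotiable.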
  // TODO(phoglund): do this for all tests on android if this works on bots.
  /** @private */
  function forceIsac16k_(sdp) {
    // Remove all other codecs (not the video codecs though).
    sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
                      'm=audio $1 RTP/SAVPF 103\r\n');
    sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:103 minptime=10');
    sdp = sdp.replace(/a=rtpmap:(?!103)\d{1,3} (?!VP8|red|ulpfec).*\r\n/g, '');
    return sdp;
  }

  function callAndEnsureAudioIsPlaying(force_isac_16k) {
    if (force_isac_16k)
      transformSdp = forceIsac16k_;
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
        addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);

    // Wait until we have gathered samples and can conclude if audio is
    // playing.
    addExpectedEvent();
    var onCallEstablished = function() {
      gatherAudioLevelSamples(gSecondConnection, 300, 100,
                              function(samples) {
        verifyAudioIsPlaying(samples);
        eventOccured();
      });
    };

    detectVideoPlaying('remote-view-2', onCallEstablished);
  }

  function callAndEnsureAudioMutingWorks() {
    callAndEnsureAudioIsPlaying();
    setAllEventsOccuredHandler(function() {
      var audioTrack =
          gSecondConnection.getRemoteStreams()[0].getAudioTracks()[0];

      // The call is up; now mute the track and check that everything goes
      // silent (give it a small delay though, we don't expect it to happen
      // instantly).
      audioTrack.enabled = false;

      setTimeout(function() {
        gatherAudioLevelSamples(gSecondConnection, 200, 100, function(samples) {
          verifyIsSilent(samples);
          document.title = 'OK';
        });
      }, 500);
    });
  }

  // Test a call with a new video MediaStream that has been created based on
  // a stream generated by getUserMedia.
  function callWithNewVideoMediaStream() {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
        createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);
    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');
  }

  // Test a call with a new video MediaStream that has been created based on
  // a stream generated by getUserMedia. When video is flowing, an audio
  // track is added to the sent stream and the video track is removed. This
  // tests that adding and removing remote tracks on an existing MediaStream
  // works.
  function callWithNewVideoMediaStreamLaterSwitchToAudio() {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
        createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);

    waitForVideo('remote-view-1');
    waitForVideo('remote-view-2');

    // Set an event handler for when video is playing.
    setAllEventsOccuredHandler(function() {
      // Add an audio track to the local stream, remove the video track and
      // then renegotiate. But first, set up the expectations.
      local_stream = gFirstConnection.getLocalStreams()[0];

      remote_stream_1 = gFirstConnection.getRemoteStreams()[0];
      // Add an expected event that onaddtrack will be called on the remote
      // mediastream received on gFirstConnection when the audio track is
      // received.
      addExpectedEvent();
      remote_stream_1.onaddtrack = function(){
        expectEquals(remote_stream_1.getAudioTracks()[0].id,
                     local_stream.getAudioTracks()[0].id);
        eventOccured();
      }

      // Add an expectation that the received video track is removed from
      // gFirstConnection.
      addExpectedEvent();
      remote_stream_1.onremovetrack = function() {
        eventOccured();
      }

      // Add an expected event that onaddtrack will be called on the remote
      // mediastream received on gSecondConnection when the audio track is
      // received.
      remote_stream_2 = gSecondConnection.getRemoteStreams()[0];
      addExpectedEvent();
      remote_stream_2.onaddtrack = function() {
        expectEquals(remote_stream_2.getAudioTracks()[0].id,
                     local_stream.getAudioTracks()[0].id);
        eventOccured();
      }

      // Add an expectation that the received video track is removed from
      // gSecondConnection.
      addExpectedEvent();
      remote_stream_2.onremovetrack = function() {
        eventOccured();
      }
      // When all the above events have occurred, the test passes.
      setAllEventsOccuredHandler(function() { document.title = 'OK'; });

      local_stream.addTrack(gLocalStream.getAudioTracks()[0]);
      local_stream.removeTrack(local_stream.getVideoTracks()[0]);
      negotiate();
    });  // End of setAllEventsOccuredHandler.
  }

  // This function sets up a test that:
  // 1. Creates a data channel on |gFirstConnection| and sends data to
  //    |gSecondConnection|.
  // 2. When data is received on |gSecondConnection|, a message is sent to
  //    |gFirstConnection|.
  // 3. When data is received on |gFirstConnection|, the data channel is
  //    closed. The test passes when the state transition completes.
  function setupDataChannel(params) {
    var sendDataString = "send some text on a data channel.";
    firstDataChannel = gFirstConnection.createDataChannel(
        "sendDataChannel", params);
    expectEquals('connecting', firstDataChannel.readyState);

    // When |firstDataChannel| transitions to the open state, send a text
    // string.
    firstDataChannel.onopen = function() {
      expectEquals('open', firstDataChannel.readyState);
      firstDataChannel.send(sendDataString);
    }

    // When |firstDataChannel| receives a message, close the channel and
    // initiate a new offer/answer exchange to complete the closure.
    firstDataChannel.onmessage = function(event) {
      expectEquals(event.data, sendDataString);
      firstDataChannel.close();
      negotiate();
    }

    // When |firstDataChannel| transitions to the closed state, the test
    // passes.
    addExpectedEvent();
    firstDataChannel.onclose = function() {
      expectEquals('closed', firstDataChannel.readyState);
      eventOccured();
    }

    // Event handler for when |gSecondConnection| receives a new dataChannel.
    gSecondConnection.ondatachannel = function (event) {
      var secondDataChannel = event.channel;

      // When |secondDataChannel| receives a message, send a message back.
      secondDataChannel.onmessage = function(event) {
        expectEquals(event.data, sendDataString);
        expectEquals('open', secondDataChannel.readyState);
        secondDataChannel.send(sendDataString);
      }
    }
  }

  // SCTP data channel setup is slightly different from RTP-based channels.
  // Due to a bug in libjingle, we can't send data immediately after the
  // channel becomes open. For that reason, in the SCTP case we send data
  // from the second channel when the ondatachannel event is received, so
  // the data flow is 2 -> 1 -> 2.
  function setupSctpDataChannel(params) {
    var sendDataString = "send some text on a data channel.";
    firstDataChannel = gFirstConnection.createDataChannel(
        "sendDataChannel", params);
    expectEquals('connecting', firstDataChannel.readyState);

    // When |firstDataChannel| transitions to the open state, just verify the
    // state; the initial message is sent from |secondDataChannel| instead
    // (see the comment above this function).
    firstDataChannel.onopen = function() {
      expectEquals('open', firstDataChannel.readyState);
    }

    // When |firstDataChannel| receives a message, send the message back.
    firstDataChannel.onmessage = function(event) {
      expectEquals('open', firstDataChannel.readyState);
      expectEquals(event.data, sendDataString);
      firstDataChannel.send(sendDataString);
    }


    // Event handler for when |gSecondConnection| receives a new dataChannel.
    gSecondConnection.ondatachannel = function (event) {
      var secondDataChannel = event.channel;
      secondDataChannel.onopen = function() {
        secondDataChannel.send(sendDataString);
      }

      // When |secondDataChannel| receives a message, close the channel and
      // initiate a new offer/answer exchange to complete the closure.
      secondDataChannel.onmessage = function(event) {
        expectEquals(event.data, sendDataString);
        expectEquals('open', secondDataChannel.readyState);
        secondDataChannel.close();
        negotiate();
      }

      // When |secondDataChannel| transitions to the closed state, the test
      // passes.
      addExpectedEvent();
      secondDataChannel.onclose = function() {
        expectEquals('closed', secondDataChannel.readyState);
        eventOccured();
      }
    }
  }

  // Test a call with a stream created by getUserMedia plus a clone of that
  // stream, both sent via the same peer connection.
  function addTwoMediaStreamsToOneConnection() {
    createConnections(null);
    navigator.webkitGetUserMedia({audio: true, video: true},
        CloneStreamAndAddTwoStreamstoOneConnection, printGetUserMediaError);
  }

  function onToneChange(tone) {
    gSentTones += tone.tone;
    document.title = gSentTones;
  }

  function createConnections(constraints) {
    gFirstConnection = createConnection(constraints, 'remote-view-1');
    expectEquals('stable', gFirstConnection.signalingState);

    gSecondConnection = createConnection(constraints, 'remote-view-2');
    expectEquals('stable', gSecondConnection.signalingState);
  }

  function createConnection(constraints, remoteView) {
    var pc = new webkitRTCPeerConnection(null, constraints);
    pc.onaddstream = function(event) {
      onRemoteStream(event, remoteView);
    }
    return pc;
  }

  function displayAndRemember(localStream) {
    var localStreamUrl = webkitURL.createObjectURL(localStream);
    $('local-view').src = localStreamUrl;

    gLocalStream = localStream;
  }

  // Called if getUserMedia fails.
  function printGetUserMediaError(error) {
    document.title = 'getUserMedia request failed:';
    if (error.constraintName)
      document.title += ' could not satisfy constraint ' + error.constraintName;
    else
      document.title += ' devices not working/user denied access.';
    console.log(document.title);
  }

  // Called if getUserMedia succeeds and we want to send from both connections.
  function addStreamToBothConnectionsAndNegotiate(localStream) {
    displayAndRemember(localStream);
    gFirstConnection.addStream(localStream);
    gSecondConnection.addStream(localStream);
    negotiate();
  }

  // Called if getUserMedia succeeds when we want to send from one connection.
  function addStreamToTheFirstConnectionAndNegotiate(localStream) {
    displayAndRemember(localStream);
    gFirstConnection.addStream(localStream);
    negotiate();
  }

  function verifyHasOneAudioAndVideoTrack(stream) {
    expectEquals(1, stream.getAudioTracks().length);
    expectEquals(1, stream.getVideoTracks().length);
  }

  // Called if getUserMedia succeeds; clones the stream and sends the two
  // streams over one peer connection.
  function CloneStreamAndAddTwoStreamstoOneConnection(localStream) {
    displayAndRemember(localStream);

    var clonedStream = null;
    if (typeof localStream.clone === "function") {
      clonedStream = localStream.clone();
    } else {
      clonedStream = new webkitMediaStream(localStream);
    }

    gFirstConnection.addStream(localStream);
    gFirstConnection.addStream(clonedStream);

    // Verify the local streams are correct.
    expectEquals(2, gFirstConnection.getLocalStreams().length);
    verifyHasOneAudioAndVideoTrack(gFirstConnection.getLocalStreams()[0]);
    verifyHasOneAudioAndVideoTrack(gFirstConnection.getLocalStreams()[1]);

    // The remote side should receive two streams. After that, verify that
    // the remote side has the correct number of streams and tracks.
    addExpectedEvent();
    addExpectedEvent();
    gSecondConnection.onaddstream = function(event) {
      eventOccured();
    }
    setAllEventsOccuredHandler(function() {
      // Negotiation complete, verify remote streams on the receiving side.
      expectEquals(2, gSecondConnection.getRemoteStreams().length);
      verifyHasOneAudioAndVideoTrack(gSecondConnection.getRemoteStreams()[0]);
      verifyHasOneAudioAndVideoTrack(gSecondConnection.getRemoteStreams()[1]);

      document.title = "OK";
    });

    negotiate();
  }

  // Called if getUserMedia succeeds when we want to send a modified
  // MediaStream. A new MediaStream is created and the video track from
  // |localStream| is added.
  function createNewVideoStreamAndAddToBothConnections(localStream) {
    displayAndRemember(localStream);
    var new_stream = new webkitMediaStream();
    new_stream.addTrack(localStream.getVideoTracks()[0]);
    gFirstConnection.addStream(new_stream);
    gSecondConnection.addStream(new_stream);
    negotiate();
  }

  function negotiate() {
    negotiateBetween(gFirstConnection, gSecondConnection);
  }
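  // Rough sketch of the signaling flow implemented by the helpers below
  // (both connections live in this page, so no real signaling channel is
  // needed):
  //
  //   caller.createOffer
  //     -> transformSdp -> caller.setLocalDescription
  //     -> transformRemoteSdp -> callee.setRemoteDescription
  //     -> callee.createAnswer
  //     -> transformSdp -> callee.setLocalDescription
  //     -> transformRemoteSdp -> caller.setRemoteDescription
  //
  // ICE candidates are forwarded directly between the two connections via
  // onicecandidate/addIceCandidate, optionally rewritten by
  // transformCandidate.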
  function negotiateBetween(caller, callee) {
    // Not stable = negotiation is ongoing. The behavior of re-negotiating
    // while a negotiation is ongoing is more or less undefined, so avoid
    // this.
    if (caller.signalingState != 'stable')
      throw 'You can only negotiate when the connection is stable!';

    connectOnIceCandidate(caller, callee);

    caller.createOffer(
        function (offer) {
          onOfferCreated(offer, caller, callee);
        });
  }

  function onOfferCreated(offer, caller, callee) {
    offer.sdp = transformSdp(offer.sdp);
    caller.setLocalDescription(offer, function() {
      expectEquals('have-local-offer', caller.signalingState);
      receiveOffer(offer.sdp, caller, callee);
    }, onLocalDescriptionError);
  }

  function receiveOffer(offerSdp, caller, callee) {
    offerSdp = transformRemoteSdp(offerSdp);

    var parsedOffer = new RTCSessionDescription({ type: 'offer',
                                                  sdp: offerSdp });
    callee.setRemoteDescription(parsedOffer);
    callee.createAnswer(function (answer) {
      onAnswerCreated(answer, caller, callee);
    });
    expectEquals('have-remote-offer', callee.signalingState);
  }

  function removeMsid(offerSdp) {
    offerSdp = offerSdp.replace(/a=msid-semantic.*\r\n/g, '');
    offerSdp = offerSdp.replace('a=mid:audio\r\n', '');
    offerSdp = offerSdp.replace('a=mid:video\r\n', '');
    offerSdp = offerSdp.replace(/a=ssrc.*\r\n/g, '');
    return offerSdp;
  }

  function removeVideoCodec(offerSdp) {
    offerSdp = offerSdp.replace('a=rtpmap:100 VP8/90000\r\n',
                                'a=rtpmap:100 XVP8/90000\r\n');
    return offerSdp;
  }

  function removeCrypto(offerSdp) {
    offerSdp = offerSdp.replace(/a=crypto.*\r\n/g, 'a=Xcrypto\r\n');
    offerSdp = offerSdp.replace(/a=fingerprint.*\r\n/g, '');
    return offerSdp;
  }

  function removeBundle(sdp) {
    return sdp.replace(/a=group:BUNDLE .*\r\n/g, '');
  }

  function useGice(sdp) {
    sdp = sdp.replace(/t=.*\r\n/g, function(subString) {
      return subString + 'a=ice-options:google-ice\r\n';
    });
    sdp = sdp.replace(/a=ice-ufrag:.*\r\n/g,
        'a=ice-ufrag:' + EXTERNAL_GICE_UFRAG + '\r\n');
    sdp = sdp.replace(/a=ice-pwd:.*\r\n/g,
        'a=ice-pwd:' + EXTERNAL_GICE_PWD + '\r\n');
    return sdp;
  }
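  // For illustration only (the exact session lines depend on the browser):
  // useGice above appends 'a=ice-options:google-ice' after the t= line, e.g.
  //   t=0 0
  //   a=ice-options:google-ice
  // and rewrites the a=ice-ufrag:/a=ice-pwd: lines to the EXTERNAL_GICE_*
  // values, so that addGiceCredsToCandidate (further below) can append
  // matching credentials to every candidate string.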
  function useExternalSdes(sdp) {
    // Remove current crypto specification.
    sdp = sdp.replace(/a=crypto.*\r\n/g, '');
    sdp = sdp.replace(/a=fingerprint.*\r\n/g, '');
    // Add external crypto. This is not compatible with |removeMsid|.
    sdp = sdp.replace(/a=mid:(\w+)\r\n/g, function(subString, group) {
      return subString + EXTERNAL_SDES_LINES[group] + '\r\n';
    });
    return sdp;
  }

  function onAnswerCreated(answer, caller, callee) {
    answer.sdp = transformSdp(answer.sdp);
    callee.setLocalDescription(answer);
    expectEquals('stable', callee.signalingState);
    receiveAnswer(answer.sdp, caller);
  }

  function receiveAnswer(answerSdp, caller) {
    answerSdp = transformRemoteSdp(answerSdp);
    var parsedAnswer = new RTCSessionDescription({ type: 'answer',
                                                   sdp: answerSdp });
    caller.setRemoteDescription(parsedAnswer);
    expectEquals('stable', caller.signalingState);
  }

  function connectOnIceCandidate(caller, callee) {
    caller.onicecandidate = function(event) { onIceCandidate(event, callee); }
    callee.onicecandidate = function(event) { onIceCandidate(event, caller); }
  }

  function addGiceCredsToCandidate(candidate) {
    return candidate.trimRight() +
        ' username ' + EXTERNAL_GICE_UFRAG + ' password ' + EXTERNAL_GICE_PWD;
  }

  function onIceCandidate(event, target) {
    if (event.candidate) {
      var candidate = new RTCIceCandidate(event.candidate);
      candidate.candidate = transformCandidate(candidate.candidate);
      target.addIceCandidate(candidate);
    }
  }

  function onRemoteStream(e, target) {
    if (gTestWithoutMsid && e.stream.id != "default") {
      document.title = 'a default remote stream was expected but instead ' +
          e.stream.id + ' was received.';
      return;
    }
    gRemoteStreams[target] = e.stream;
    var remoteStreamUrl = webkitURL.createObjectURL(e.stream);
    var remoteVideo = $(target);
    remoteVideo.src = remoteStreamUrl;
  }

  </script>
</head>
<body>
  <table border="0">
    <tr>
      <td>Local Preview</td>
      <td>Remote Stream for Connection 1</td>
      <td>Remote Stream for Connection 2</td>
      <td>Remote Stream for Connection 3</td>
      <td>Remote Stream for Connection 4</td>
    </tr>
    <tr>
      <td><video width="320" height="240" id="local-view"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-1"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-2"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-3"
          autoplay="autoplay"></video></td>
      <td><video width="320" height="240" id="remote-view-4"
          autoplay="autoplay"></video></td>
      <!-- Canvases are named after their corresponding video elements. -->
      <td><canvas width="320" height="240" id="remote-view-1-canvas"
          style="display:none"></canvas></td>
      <td><canvas width="320" height="240" id="remote-view-2-canvas"
          style="display:none"></canvas></td>
      <td><canvas width="320" height="240" id="remote-view-3-canvas"
          style="display:none"></canvas></td>
      <td><canvas width="320" height="240" id="remote-view-4-canvas"
          style="display:none"></canvas></td>
    </tr>
  </table>
</body>
</html>