<html>
<head>
  <script type="text/javascript" src="webrtc_test_utilities.js"></script>
  <script type="text/javascript">
  $ = function(id) {
    return document.getElementById(id);
  };

  var gLocalStream = null;

  setAllEventsOccuredHandler(function() {
    gLocalStream.stop();
    reportTestSuccess();
  });

  function getSources() {
    MediaStreamTrack.getSources(function(devices) {
      document.title = 'Media devices available';
      sendValueToTest(JSON.stringify(devices));
    });
  }

  // Creates a MediaStream and renders it locally. When the video is detected
  // to be rolling, the stream should be stopped.
  function getUserMediaAndStop(constraints) {
    console.log('Calling getUserMediaAndStop.');
    navigator.webkitGetUserMedia(
        constraints,
        function(stream) { displayAndDetectVideo(stream, stopVideoTrack); },
        failedCallback);
  }

  // Requests getUserMedia and expects it to fail. The error name is returned
  // to the test.
  function getUserMediaAndExpectFailure(constraints) {
    console.log('Calling getUserMediaAndExpectFailure.');
    navigator.webkitGetUserMedia(
        constraints,
        function(stream) { failTest('Unexpectedly succeeded getUserMedia.'); },
        function(error) { sendValueToTest(error.name); });
  }

  // Creates a MediaStream, clones it, verifies that the clone got its own
  // video track id, and renders the clone.
  function renderClonedMediastreamAndStop(constraints, waitTimeInSeconds) {
    console.log('Calling renderClonedMediastreamAndStop.');
    navigator.webkitGetUserMedia(
        constraints,
        function(stream) {
          var s = stream.clone();
          assertEquals(stream.getVideoTracks().length, 1);
          assertEquals(s.getVideoTracks().length, 1);
          assertNotEquals(stream.getVideoTracks()[0].id,
                          s.getVideoTracks()[0].id);
          displayAndDetectVideo(
              s,
              function() {
                reportTestSuccess();
              });
        },
        failedCallback);
  }

  // Creates a second MediaStream from the first one's tracks, verifies that
  // both streams share the same video track, and renders the new stream.
  function renderDuplicatedMediastreamAndStop(constraints, waitTimeInSeconds) {
    console.log('Calling renderDuplicatedMediastreamAndStop.');
    navigator.webkitGetUserMedia(
        constraints,
        function(stream) {
          var s = new webkitMediaStream(stream);
          assertEquals(stream.getVideoTracks().length, 1);
          assertEquals(s.getVideoTracks().length, 1);
          assertEquals(stream.getVideoTracks()[0].id,
                       s.getVideoTracks()[0].id);
          displayAndDetectVideo(
              s,
              function() {
                reportTestSuccess();
              });
        },
        failedCallback);
  }

  // Adds the original video track to a new MediaStream and renders it.
  function renderSameTrackMediastreamAndStop(constraints, waitTimeInSeconds) {
    console.log('Calling renderSameTrackMediastreamAndStop.');
    navigator.webkitGetUserMedia(
        constraints,
        function(stream) {
          var s = new webkitMediaStream();
          s.addTrack(stream.getVideoTracks()[0]);
          assertEquals(stream.getVideoTracks().length, 1);
          assertEquals(s.getVideoTracks().length, 1);
          assertEquals(stream.getVideoTracks()[0].id, s.getVideoTracks()[0].id);
          displayAndDetectVideo(
              s,
              function() {
                reportTestSuccess();
              });
        },
        failedCallback);
  }

  // Adds a clone of the original video track to a new MediaStream, verifies
  // that the clone got a new id, and renders it.
  function renderClonedTrackMediastreamAndStop(constraints, waitTimeInSeconds) {
    console.log('Calling renderClonedTrackMediastreamAndStop.');
    navigator.webkitGetUserMedia(
        constraints,
        function(stream) {
          var s = new webkitMediaStream();
          s.addTrack(stream.getVideoTracks()[0].clone());
          assertEquals(stream.getVideoTracks().length, 1);
          assertEquals(s.getVideoTracks().length, 1);
          assertNotEquals(stream.getVideoTracks()[0].id,
                          s.getVideoTracks()[0].id);
          displayAndDetectVideo(
              s,
              function() {
                reportTestSuccess();
              });
        },
        failedCallback);
  }

  // Creates a MediaStream and renders it locally. When the video is detected
  // to be rolling, success is reported back to the test.
  function getUserMediaAndGetStreamUp(constraints, waitTimeInSeconds) {
    console.log('Calling getUserMediaAndGetStreamUp.');
    navigator.webkitGetUserMedia(
        constraints,
        function(stream) {
          displayAndDetectVideo(
              stream,
              function() {
                reportTestSuccess();
              });
        },
        failedCallback);
  }

  // Gets a video stream up and renders it in several video tags; see
  // createMultipleVideoRenderersAndPause.
  function getUserMediaAndRenderInSeveralVideoTags() {
    navigator.webkitGetUserMedia(
        {video: true},
        createMultipleVideoRenderersAndPause,
        failedCallback);
  }

  // Gets a video stream up, analyses it and returns the aspect ratio to the
  // test through the automation controller.
  function getUserMediaAndAnalyseAndStop(constraints) {
    console.log('Calling getUserMediaAndAnalyseAndStop.');
    navigator.webkitGetUserMedia(
        constraints, displayDetectAndAnalyzeVideo, failedCallback);
  }

  // This tests that a MediaStream can be cloned and that the clone can
  // be rendered.
  function getUserMediaAndClone() {
    console.log('Calling getUserMediaAndClone.');
    navigator.webkitGetUserMedia({video: true, audio: true},
        createAndRenderClone, failedCallback);
  }

  // Creates two MediaStreams and renders them locally. When the video of both
  // streams is detected to be rolling, we stop the local video tracks one at
  // a time.
  function twoGetUserMediaAndStop(constraints) {
    console.log('Calling Two GetUserMedia');
    navigator.webkitGetUserMedia(
        constraints,
        function(stream) {
          displayAndDetectVideo(stream, requestSecondGetUserMedia);
        },
        failedCallback);
    var requestSecondGetUserMedia = function() {
      navigator.webkitGetUserMedia(
          constraints,
          function(stream) {
            displayIntoVideoElement(stream,
                function() {
                  stopBothVideoTracksAndVerify(stream);
                },
                'local-view-2');
          },
          failedCallback);
    };

    var stopBothVideoTracksAndVerify = function(streamPlayingInLocalView2) {
      streamPlayingInLocalView2.getVideoTracks()[0].stop();
      waitForVideoToStop('local-view-2');
      // Make sure the video track in gLocalStream is still playing in
      // 'local-view' and then stop it.
      displayAndDetectVideo(gLocalStream, stopVideoTrack);
    };
  }

  // Requests two streams with different constraints, analyzes the aspect
  // ratio of each, and returns both results to the test joined by '-'.
  function twoGetUserMedia(constraints1, constraints2) {
    console.log('Calling Two GetUserMedia');
    var result = '';
    navigator.webkitGetUserMedia(
        constraints1,
        function(stream) {
          displayDetectAndAnalyzeVideoInElement(
              stream,
              function(aspectRatio) {
                result = aspectRatio;
                requestSecondGetUserMedia();
              },
              'local-view');
        },
        failedCallback);
    var requestSecondGetUserMedia = function() {
      navigator.webkitGetUserMedia(
          constraints2,
          function(stream) {
            displayDetectAndAnalyzeVideoInElement(
                stream,
                function(aspectRatio) {
                  result = result + '-' + aspectRatio;
                  sendValueToTest(result);
                },
                'local-view-2');
          },
          failedCallback);
    };
  }

  function failedCallback(error) {
    failTest('GetUserMedia call failed with code ' + error.code);
  }

  function plugStreamIntoVideoElement(stream, videoElement) {
    gLocalStream = stream;
    var localStreamUrl = URL.createObjectURL(stream);
    $(videoElement).src = localStreamUrl;
  }

  function displayIntoVideoElement(stream, callback, videoElement) {
    plugStreamIntoVideoElement(stream, videoElement);
    detectVideoPlaying(videoElement, callback);
  }

  function displayAndDetectVideo(stream, callback) {
    displayIntoVideoElement(stream, callback, 'local-view');
  }

  function displayDetectAndAnalyzeVideo(stream) {
    displayDetectAndAnalyzeVideoInElement(stream,
        function(aspectRatio) {
          sendValueToTest(aspectRatio);
        },
        'local-view');
  }

  function displayDetectAndAnalyzeVideoInElement(
      stream, callback, videoElement) {
    plugStreamIntoVideoElement(stream, videoElement);
    detectAspectRatio(callback, videoElement);
  }

  function createAndRenderClone(stream) {
    gLocalStream = stream;
    // TODO(perkj): --use-fake-device-for-media-stream does not currently
    // work with audio devices, and not all bots have a microphone.
    var new_stream = new webkitMediaStream();
    new_stream.addTrack(stream.getVideoTracks()[0]);
    assertEquals(new_stream.getVideoTracks().length, 1);
    if (stream.getAudioTracks().length > 0) {
      new_stream.addTrack(stream.getAudioTracks()[0]);
      assertEquals(new_stream.getAudioTracks().length, 1);
      new_stream.removeTrack(new_stream.getAudioTracks()[0]);
      assertEquals(new_stream.getAudioTracks().length, 0);
    }

    var newStreamUrl = URL.createObjectURL(new_stream);
    $('local-view').src = newStreamUrl;
    waitForVideo('local-view');
  }

  function stopVideoTrack() {
    gLocalStream.getVideoTracks()[0].stop();
    waitForVideoToStop('local-view');
  }

  function waitAndStopVideoTrack(waitTimeInSeconds) {
    setTimeout(stopVideoTrack, waitTimeInSeconds * 1000);
  }

  // This test makes sure multiple video renderers can be created for the same
  // local video track, and that a renderer can still render if other
  // renderers are paused. See http://crbug/352619.
  function createMultipleVideoRenderersAndPause(stream) {
    function createDetectableRenderer(stream, id) {
      var video = document.createElement('video');
      document.body.appendChild(video);
      var localStreamUrl = URL.createObjectURL(stream);
      video.id = id;
      video.src = localStreamUrl;
      video.autoplay = true;
      video.play();
      // The detector needs a canvas.
      var canvas = document.createElement('canvas');
      canvas.id = video.id + "-canvas";
      document.body.appendChild(canvas);
    }

    // Once 3 renderers are created and paused, create one last renderer and
    // make sure it can play video.
    setAllEventsOccuredHandler(function() {
      var id = "lastVideoTag";
      createDetectableRenderer(stream, id);
      detectVideoPlaying(id, function () { reportTestSuccess(); });
    });

    // Create 3 video renderers and pause them once video is playing.
    for (var i = 0; i < 3; ++i) {
      var id = "video" + i;
      createDetectableRenderer(stream, id);
      addExpectedEvent();
      // |video_detected_function| creates a new function that pauses the
      // video tag |id|.
      var video_detected_function =
          function (j) {
            return function () {
              console.log("pause " + j);
              $(j).pause();
              eventOccured();
            };
          };
      // Detect video id |id| and trigger the function returned by
      // |video_detected_function| when video is playing.
      detectVideoPlaying(id, video_detected_function(id));
    }
  }

  // This function tries to calculate the aspect ratio shown by the fake
  // capture device in the video tag. For this, we count the number of light
  // green pixels along |aperture| pixels on the positive X and Y axis,
  // starting from the center of the image. In this very center there should
  // be a time-varying pacman; the algorithm counts for a couple of iterations
  // and keeps the maximum number of light green pixels in both directions.
  // From this data the aspect ratio is calculated, and the test fails if the
  // number of green pixels is not the same along the X and Y axis.
  // The result of the analysis is sent back to the test as a string in the
  // format "w=xxx:h=yyy".
  function detectAspectRatio(callback, videoElementName) {
    var videoElement = $(videoElementName);
    var canvas = $(videoElementName + '-canvas');

    var maxLightGreenPixelsX = 0;
    var maxLightGreenPixelsY = 0;

    var iterations = 0;
    var maxIterations = 10;

    var detectorFunction = function() {
      var width = videoElement.videoWidth;
      var height = videoElement.videoHeight;
      if (width == 0 || height == 0)
        return;

      canvas.width = width;
      canvas.height = height;
      var aperture = Math.min(width, height) / 2;
      var context = canvas.getContext('2d');
      context.drawImage(videoElement, 0, 0, width, height);

      // We are interested in a window starting from the center of the image
      // where we expect the circle from the fake video capture to be rolling.
      var pixels = context.getImageData(width / 2, height / 2,
                                        aperture, aperture);

      var lightGreenPixelsX = 0;
      var lightGreenPixelsY = 0;

      // Walk horizontally counting light green pixels.
      for (var x = 0; x < aperture; ++x) {
        if (pixels.data[4 * x + 1] != COLOR_BACKGROUND_GREEN)
          lightGreenPixelsX++;
      }
      // Walk vertically counting light green pixels.
      for (var y = 0; y < aperture; ++y) {
        if (pixels.data[4 * y * aperture + 1] != COLOR_BACKGROUND_GREEN)
          lightGreenPixelsY++;
      }
      if (lightGreenPixelsX > maxLightGreenPixelsX)
        maxLightGreenPixelsX = lightGreenPixelsX;
      if (lightGreenPixelsY > maxLightGreenPixelsY)
        maxLightGreenPixelsY = lightGreenPixelsY;

      if (++iterations > maxIterations) {
        clearInterval(detectorInterval);
        // Allow maxLightGreenPixelsY = maxLightGreenPixelsX +-1 due to
        // possible subpixel rendering on Mac and Android.
        if (maxLightGreenPixelsY > maxLightGreenPixelsX + 1 ||
            maxLightGreenPixelsY < maxLightGreenPixelsX - 1 ||
            maxLightGreenPixelsY == 0 ||
            maxLightGreenPixelsX == width || maxLightGreenPixelsY == height) {
          failTest("Aspect ratio corrupted. X " + maxLightGreenPixelsX +
                   " Y " + maxLightGreenPixelsY);
        }

        var result = "w=" + width + ":h=" + height;
        console.log(result);
        callback(result);
      }
    };
    var detectorInterval = setInterval(detectorFunction, 50);
  }
  </script>
</head>
<body>
  <table border="0">
    <tr>
      <td>Local Preview</td>
    </tr>
    <tr>
      <td><video width="320" height="240" id="local-view"
          autoplay="autoplay"></video></td>
      <td><canvas id="local-view-canvas"
          style="display:none"></canvas></td>
    </tr>
    <tr>
      <td>Local Preview 2</td>
    </tr>
    <tr>
      <td><video width="320" height="240" id="local-view-2"
          autoplay="autoplay"></video></td>
      <!-- Canvases are named after their corresponding video elements. -->
      <td><canvas width="320" height="240" id="local-view-2-canvas"
          style="display:none"></canvas></td>
    </tr>
  </table>
</body>
</html>