// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/blink/webmediaplayer_impl.h"

#include <algorithm>
#include <limits>
#include <string>
#include <vector>

#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/debug/alias.h"
#include "base/debug/crash_logging.h"
#include "base/debug/trace_event.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/waitable_event.h"
#include "cc/blink/web_layer_impl.h"
#include "cc/layers/video_layer.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/audio/null_audio_sink.h"
#include "media/base/audio_hardware_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/limits.h"
#include "media/base/media_log.h"
#include "media/base/pipeline.h"
#include "media/base/text_renderer.h"
#include "media/base/video_frame.h"
#include "media/blink/buffered_data_source.h"
#include "media/blink/encrypted_media_player_support.h"
#include "media/blink/texttrack_impl.h"
#include "media/blink/webaudiosourceprovider_impl.h"
#include "media/blink/webinbandtexttrack_impl.h"
#include "media/blink/webmediaplayer_delegate.h"
#include "media/blink/webmediaplayer_params.h"
#include "media/blink/webmediaplayer_util.h"
#include "media/blink/webmediasource_impl.h"
#include "media/filters/audio_renderer_impl.h"
#include "media/filters/chunk_demuxer.h"
#include "media/filters/ffmpeg_audio_decoder.h"
#include "media/filters/ffmpeg_demuxer.h"
#include "media/filters/ffmpeg_video_decoder.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/filters/gpu_video_decoder.h"
#include "media/filters/opus_audio_decoder.h"
#include "media/filters/renderer_impl.h"
#include "media/filters/video_renderer_impl.h"
#include "media/filters/vpx_video_decoder.h"
#include "third_party/WebKit/public/platform/WebMediaSource.h"
#include "third_party/WebKit/public/platform/WebRect.h"
#include "third_party/WebKit/public/platform/WebSize.h"
#include "third_party/WebKit/public/platform/WebString.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "third_party/WebKit/public/web/WebSecurityOrigin.h"
#include "third_party/WebKit/public/web/WebView.h"

using blink::WebCanvas;
using blink::WebMediaPlayer;
using blink::WebRect;
using blink::WebSize;
using blink::WebString;

namespace {

// Limits the range of playback rate.
//
// TODO(kylep): Revisit these.
//
// Vista has substantially lower performance than XP or Windows7. If you speed
// up a video too much, it can't keep up, and rendering stops updating except
// on the time bar. For really high speeds, audio becomes a bottleneck and we
// just use up the data we have, which may not achieve the speed requested,
// but will not crash the tab.
//
// A very slow speed, i.e. 0.00000001x, causes the machine to lock up. (It
// seems like a busy loop.) It gets unresponsive, although it's not completely
// dead.
//
// Also our timers are not very accurate (especially for ogg), which becomes
// evident at low speeds and on Vista. Since other speeds are risky and outside
// the norms, we think 1/16x to 16x is a safe and useful range for now.
const double kMinRate = 0.0625;
const double kMaxRate = 16.0;

class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
 public:
  explicit SyncPointClientImpl(
      blink::WebGraphicsContext3D* web_graphics_context)
      : web_graphics_context_(web_graphics_context) {}
  virtual ~SyncPointClientImpl() {}
  virtual uint32 InsertSyncPoint() OVERRIDE {
    return web_graphics_context_->insertSyncPoint();
  }
  virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE {
    web_graphics_context_->waitSyncPoint(sync_point);
  }

 private:
  blink::WebGraphicsContext3D* web_graphics_context_;
};

}  // namespace

namespace media {

class BufferedDataSourceHostImpl;

#define COMPILE_ASSERT_MATCHING_ENUM(name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \
                 static_cast<int>(BufferedResourceLoader::k ## name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(Unspecified);
COMPILE_ASSERT_MATCHING_ENUM(Anonymous);
COMPILE_ASSERT_MATCHING_ENUM(UseCredentials);
#undef COMPILE_ASSERT_MATCHING_ENUM

#define BIND_TO_RENDER_LOOP(function) \
  (DCHECK(main_task_runner_->BelongsToCurrentThread()), \
   BindToCurrentLoop(base::Bind(function, AsWeakPtr())))

#define BIND_TO_RENDER_LOOP1(function, arg1) \
  (DCHECK(main_task_runner_->BelongsToCurrentThread()), \
   BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1)))

static void LogMediaSourceError(const scoped_refptr<MediaLog>& media_log,
                                const std::string& error) {
  media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error));
}

WebMediaPlayerImpl::WebMediaPlayerImpl(
    blink::WebLocalFrame* frame,
    blink::WebMediaPlayerClient* client,
    base::WeakPtr<WebMediaPlayerDelegate> delegate,
    const WebMediaPlayerParams& params)
    : frame_(frame),
      network_state_(WebMediaPlayer::NetworkStateEmpty),
      ready_state_(WebMediaPlayer::ReadyStateHaveNothing),
      preload_(BufferedDataSource::AUTO),
      main_task_runner_(base::MessageLoopProxy::current()),
      media_task_runner_(params.media_task_runner()),
      media_log_(params.media_log()),
      pipeline_(media_task_runner_, media_log_.get()),
      load_type_(LoadTypeURL),
      opaque_(false),
      paused_(true),
      seeking_(false),
      playback_rate_(0.0f),
      ended_(false),
      pending_seek_(false),
      pending_seek_seconds_(0.0f),
      should_notify_time_changed_(false),
      client_(client),
      delegate_(delegate),
      defer_load_cb_(params.defer_load_cb()),
      gpu_factories_(params.gpu_factories()),
      supports_save_(true),
      chunk_demuxer_(NULL),
      compositor_task_runner_(params.compositor_task_runner()),
      compositor_(new VideoFrameCompositor(
          BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnNaturalSizeChanged),
          BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnOpacityChanged))),
      text_track_index_(0),
      encrypted_media_support_(
          params.CreateEncryptedMediaPlayerSupport(client)),
      audio_hardware_config_(params.audio_hardware_config()) {
  DCHECK(encrypted_media_support_);

  // Threaded compositing isn't enabled universally yet.
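  // Fall back to the main (render) thread's task runner when no compositor
  // task runner was provided.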
  if (!compositor_task_runner_.get())
    compositor_task_runner_ = base::MessageLoopProxy::current();

  media_log_->AddEvent(
      media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_CREATED));

  // |gpu_factories_| requires that its entry points be called on its
  // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the
  // factories, require that their message loops are identical.
  DCHECK(!gpu_factories_.get() ||
         (gpu_factories_->GetTaskRunner() == media_task_runner_.get()));

  // Use the null sink if no sink was provided.
  audio_source_provider_ = new WebAudioSourceProviderImpl(
      params.audio_renderer_sink().get()
          ? params.audio_renderer_sink()
          : new NullAudioSink(media_task_runner_));
}

WebMediaPlayerImpl::~WebMediaPlayerImpl() {
  client_->setWebLayer(NULL);

  DCHECK(main_task_runner_->BelongsToCurrentThread());
  media_log_->AddEvent(
      media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));

  if (delegate_)
    delegate_->PlayerGone(this);

  // Abort any pending IO so stopping the pipeline doesn't get blocked.
  if (data_source_)
    data_source_->Abort();
  if (chunk_demuxer_) {
    chunk_demuxer_->Shutdown();
    chunk_demuxer_ = NULL;
  }

  gpu_factories_ = NULL;

  // Make sure to kill the pipeline so there are no more media threads running.
  // Note: stopping the pipeline might block for a long time.
  base::WaitableEvent waiter(false, false);
  pipeline_.Stop(
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&waiter)));
  waiter.Wait();

  compositor_task_runner_->DeleteSoon(FROM_HERE, compositor_);
}

void WebMediaPlayerImpl::load(LoadType load_type, const blink::WebURL& url,
                              CORSMode cors_mode) {
  DVLOG(1) << __FUNCTION__ << "(" << load_type << ", " << url << ", "
           << cors_mode << ")";
  if (!defer_load_cb_.is_null()) {
    defer_load_cb_.Run(base::Bind(
        &WebMediaPlayerImpl::DoLoad, AsWeakPtr(), load_type, url, cors_mode));
    return;
  }
  DoLoad(load_type, url, cors_mode);
}

void WebMediaPlayerImpl::DoLoad(LoadType load_type,
                                const blink::WebURL& url,
                                CORSMode cors_mode) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  GURL gurl(url);
  ReportMediaSchemeUma(gurl);

  // Set subresource URL for crash reporting.
  base::debug::SetCrashKeyValue("subresource_url", gurl.spec());

  load_type_ = load_type;

  SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  SetReadyState(WebMediaPlayer::ReadyStateHaveNothing);
  media_log_->AddEvent(media_log_->CreateLoadEvent(url.spec()));

  // Media source pipelines can start immediately.
  if (load_type == LoadTypeMediaSource) {
    supports_save_ = false;
    StartPipeline();
    return;
  }

  // Otherwise it's a regular request which requires resolving the URL first.
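  // The data source initializes asynchronously; DataSourceInitialized()
  // starts the pipeline once initialization succeeds.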
  data_source_.reset(new BufferedDataSource(
      url,
      static_cast<BufferedResourceLoader::CORSMode>(cors_mode),
      main_task_runner_,
      frame_,
      media_log_.get(),
      &buffered_data_source_host_,
      base::Bind(&WebMediaPlayerImpl::NotifyDownloading, AsWeakPtr())));
  data_source_->Initialize(
      base::Bind(&WebMediaPlayerImpl::DataSourceInitialized, AsWeakPtr()));
  data_source_->SetPreload(preload_);
}

void WebMediaPlayerImpl::play() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  paused_ = false;
  pipeline_.SetPlaybackRate(playback_rate_);
  if (data_source_)
    data_source_->MediaIsPlaying();

  media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PLAY));

  if (delegate_ && playback_rate_ > 0)
    delegate_->DidPlay(this);
}

void WebMediaPlayerImpl::pause() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  const bool was_already_paused = paused_ || playback_rate_ == 0;
  paused_ = true;
  pipeline_.SetPlaybackRate(0.0f);
  if (data_source_)
    data_source_->MediaIsPaused();
  paused_time_ = pipeline_.GetMediaTime();

  media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PAUSE));

  if (!was_already_paused && delegate_)
    delegate_->DidPause(this);
}

bool WebMediaPlayerImpl::supportsSave() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return supports_save_;
}

void WebMediaPlayerImpl::seek(double seconds) {
  DVLOG(1) << __FUNCTION__ << "(" << seconds << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  ended_ = false;

  if (ready_state_ > WebMediaPlayer::ReadyStateHaveMetadata)
    SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  base::TimeDelta seek_time = ConvertSecondsToTimestamp(seconds);

  if (seeking_) {
    pending_seek_ = true;
    pending_seek_seconds_ = seconds;
    if (chunk_demuxer_)
      chunk_demuxer_->CancelPendingSeek(seek_time);
    return;
  }

  media_log_->AddEvent(media_log_->CreateSeekEvent(seconds));

  // Update our paused time.
  if (paused_)
    paused_time_ = seek_time;

  seeking_ = true;

  if (chunk_demuxer_)
    chunk_demuxer_->StartWaitingForSeek(seek_time);

  // Kick off the asynchronous seek!
  pipeline_.Seek(
      seek_time,
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, true));
}

void WebMediaPlayerImpl::setRate(double rate) {
  DVLOG(1) << __FUNCTION__ << "(" << rate << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // TODO(kylep): Remove when support for negatives is added. Also, modify the
  // following checks so rewind uses reasonable values also.
  if (rate < 0.0)
    return;

  // Limit rates to reasonable values by clamping.
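  // A rate of zero is treated as an implicit pause for delegate notification
  // purposes; any other rate is clamped to [kMinRate, kMaxRate].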
  if (rate != 0.0) {
    if (rate < kMinRate)
      rate = kMinRate;
    else if (rate > kMaxRate)
      rate = kMaxRate;
    if (playback_rate_ == 0 && !paused_ && delegate_)
      delegate_->DidPlay(this);
  } else if (playback_rate_ != 0 && !paused_ && delegate_) {
    delegate_->DidPause(this);
  }

  playback_rate_ = rate;
  if (!paused_) {
    pipeline_.SetPlaybackRate(rate);
    if (data_source_)
      data_source_->MediaPlaybackRateChanged(rate);
  }
}

void WebMediaPlayerImpl::setVolume(double volume) {
  DVLOG(1) << __FUNCTION__ << "(" << volume << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  pipeline_.SetVolume(volume);
}

#define COMPILE_ASSERT_MATCHING_ENUM(webkit_name, chromium_name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::webkit_name) == \
                 static_cast<int>(BufferedDataSource::chromium_name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(PreloadNone, NONE);
COMPILE_ASSERT_MATCHING_ENUM(PreloadMetaData, METADATA);
COMPILE_ASSERT_MATCHING_ENUM(PreloadAuto, AUTO);
#undef COMPILE_ASSERT_MATCHING_ENUM

void WebMediaPlayerImpl::setPreload(WebMediaPlayer::Preload preload) {
  DVLOG(1) << __FUNCTION__ << "(" << preload << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  preload_ = static_cast<BufferedDataSource::Preload>(preload);
  if (data_source_)
    data_source_->SetPreload(preload_);
}

bool WebMediaPlayerImpl::hasVideo() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_metadata_.has_video;
}

bool WebMediaPlayerImpl::hasAudio() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_metadata_.has_audio;
}

blink::WebSize WebMediaPlayerImpl::naturalSize() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return blink::WebSize(pipeline_metadata_.natural_size);
}

bool WebMediaPlayerImpl::paused() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_.GetPlaybackRate() == 0.0f;
}

bool WebMediaPlayerImpl::seeking() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return false;

  return seeking_;
}

double WebMediaPlayerImpl::duration() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return std::numeric_limits<double>::quiet_NaN();

  return GetPipelineDuration();
}

double WebMediaPlayerImpl::timelineOffset() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (pipeline_metadata_.timeline_offset.is_null())
    return std::numeric_limits<double>::quiet_NaN();

  return pipeline_metadata_.timeline_offset.ToJsTime();
}

double WebMediaPlayerImpl::currentTime() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);

  // TODO(scherkus): Replace with an explicit ended signal to HTMLMediaElement,
  // see http://crbug.com/409280
  if (ended_)
    return duration();

  return (paused_ ?
      paused_time_ : pipeline_.GetMediaTime()).InSecondsF();
}

WebMediaPlayer::NetworkState WebMediaPlayerImpl::networkState() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return network_state_;
}

WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return ready_state_;
}

blink::WebTimeRanges WebMediaPlayerImpl::buffered() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  Ranges<base::TimeDelta> buffered_time_ranges =
      pipeline_.GetBufferedTimeRanges();

  const base::TimeDelta duration = pipeline_.GetMediaDuration();
  if (duration != kInfiniteDuration()) {
    buffered_data_source_host_.AddBufferedTimeRanges(
        &buffered_time_ranges, duration);
  }
  return ConvertToWebTimeRanges(buffered_time_ranges);
}

double WebMediaPlayerImpl::maxTimeSeekable() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // If we haven't even gotten to ReadyStateHaveMetadata yet then just
  // return 0 so that the seekable range is empty.
  if (ready_state_ < WebMediaPlayer::ReadyStateHaveMetadata)
    return 0.0;

  // We don't support seeking in streaming media.
  if (data_source_ && data_source_->IsStreaming())
    return 0.0;
  return duration();
}

bool WebMediaPlayerImpl::didLoadingProgress() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  bool pipeline_progress = pipeline_.DidLoadingProgress();
  bool data_progress = buffered_data_source_host_.DidLoadingProgress();
  return pipeline_progress || data_progress;
}

void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha) {
  paint(canvas, rect, alpha, SkXfermode::kSrcOver_Mode);
}

void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha,
                               SkXfermode::Mode mode) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");

  // TODO(scherkus): Clarify paint() API contract to better understand when and
  // why it's being called. For example, today paint() is called when:
  //   - We haven't reached HAVE_CURRENT_DATA and need to paint black
  //   - We're painting to a canvas
  // See http://crbug.com/341225 http://crbug.com/342621 for details.
  scoped_refptr<VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  gfx::Rect gfx_rect(rect);

  skcanvas_video_renderer_.Paint(video_frame,
                                 canvas,
                                 gfx_rect,
                                 alpha,
                                 mode,
                                 pipeline_metadata_.video_rotation);
}

bool WebMediaPlayerImpl::hasSingleSecurityOrigin() const {
  if (data_source_)
    return data_source_->HasSingleOrigin();
  return true;
}

bool WebMediaPlayerImpl::didPassCORSAccessCheck() const {
  if (data_source_)
    return data_source_->DidPassCORSAccessCheck();
  return false;
}

double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const {
  return ConvertSecondsToTimestamp(timeValue).InSecondsF();
}

unsigned WebMediaPlayerImpl::decodedFrameCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_frames_decoded;
}

unsigned WebMediaPlayerImpl::droppedFrameCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_frames_dropped;
}

unsigned WebMediaPlayerImpl::audioDecodedByteCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.audio_bytes_decoded;
}

unsigned WebMediaPlayerImpl::videoDecodedByteCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_bytes_decoded;
}

bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
    blink::WebGraphicsContext3D* web_graphics_context,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");

  scoped_refptr<VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  if (!video_frame.get())
    return false;
  if (video_frame->format() != VideoFrame::NATIVE_TEXTURE)
    return false;

  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  if (mailbox_holder->texture_target != GL_TEXTURE_2D)
    return false;

  web_graphics_context->waitSyncPoint(mailbox_holder->sync_point);
  uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM(
      GL_TEXTURE_2D, mailbox_holder->mailbox.name);

  // The video is stored in an unmultiplied format, so premultiply
  // if necessary.
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    premultiply_alpha);
  // The application itself needs to set the right flip_y value to get the
  // expected result: flip_y == true reverses the video orientation, while
  // flip_y == false keeps the intrinsic orientation.
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  web_graphics_context->copyTextureCHROMIUM(GL_TEXTURE_2D,
                                            source_texture,
                                            texture,
                                            level,
                                            internal_format,
                                            type);
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    false);

  web_graphics_context->deleteTexture(source_texture);
  web_graphics_context->flush();

  SyncPointClientImpl client(web_graphics_context);
  video_frame->UpdateReleaseSyncPoint(&client);
  return true;
}

WebMediaPlayer::MediaKeyException
WebMediaPlayerImpl::generateKeyRequest(const WebString& key_system,
                                       const unsigned char* init_data,
                                       unsigned init_data_length) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->GenerateKeyRequest(
      frame_, key_system, init_data, init_data_length);
}

WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::addKey(
    const WebString& key_system,
    const unsigned char* key,
    unsigned key_length,
    const unsigned char* init_data,
    unsigned init_data_length,
    const WebString& session_id) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->AddKey(
      key_system, key, key_length, init_data, init_data_length, session_id);
}

WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::cancelKeyRequest(
    const WebString& key_system,
    const WebString& session_id) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->CancelKeyRequest(key_system, session_id);
}

void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm);
}

void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm,
    blink::WebContentDecryptionModuleResult result) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm, result);
}

void WebMediaPlayerImpl::OnPipelineSeeked(bool time_changed,
                                          PipelineStatus status) {
  DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  seeking_ = false;
  if (pending_seek_) {
    pending_seek_ = false;
    seek(pending_seek_seconds_);
    return;
  }

  if (status != PIPELINE_OK) {
    OnPipelineError(status);
    return;
  }

  // Update our paused time.
  if (paused_)
    paused_time_ = pipeline_.GetMediaTime();

  should_notify_time_changed_ = time_changed;
}

void WebMediaPlayerImpl::OnPipelineEnded() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // Ignore state changes until we've completed all outstanding seeks.
  if (seeking_ || pending_seek_)
    return;

  ended_ = true;
  client_->timeChanged();
}

void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(error, PIPELINE_OK);

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) {
    // Any error that occurs before reaching ReadyStateHaveMetadata should
    // be considered a format error.
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  SetNetworkState(PipelineErrorToNetworkState(error));

  if (error == PIPELINE_ERROR_DECRYPT)
    encrypted_media_support_->OnPipelineDecryptError();
}

void WebMediaPlayerImpl::OnPipelineMetadata(
    PipelineMetadata metadata) {
  DVLOG(1) << __FUNCTION__;

  pipeline_metadata_ = metadata;

  UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation",
                            metadata.video_rotation,
                            VIDEO_ROTATION_MAX + 1);
  SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  if (hasVideo()) {
    DCHECK(!video_weblayer_);
    scoped_refptr<cc::VideoLayer> layer =
        cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation);

    if (pipeline_metadata_.video_rotation == VIDEO_ROTATION_90 ||
        pipeline_metadata_.video_rotation == VIDEO_ROTATION_270) {
      gfx::Size size = pipeline_metadata_.natural_size;
      pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width());
    }

    video_weblayer_.reset(new cc_blink::WebLayerImpl(layer));
    video_weblayer_->setOpaque(opaque_);
    client_->setWebLayer(video_weblayer_.get());
  }
}

void WebMediaPlayerImpl::OnPipelineBufferingStateChanged(
    BufferingState buffering_state) {
  DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")";

  // Ignore buffering state changes until we've completed all outstanding
  // seeks.
  if (seeking_ || pending_seek_)
    return;

  // TODO(scherkus): Handle other buffering states when Pipeline starts using
  // them and translate them to ready state changes http://crbug.com/144683
  DCHECK_EQ(buffering_state, BUFFERING_HAVE_ENOUGH);
  SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);

  // Blink expects a timeChanged() in response to a seek().
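  // should_notify_time_changed_ was set by OnPipelineSeeked(); fire the
  // notification now that enough data has buffered.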
  if (should_notify_time_changed_)
    client_->timeChanged();
}

void WebMediaPlayerImpl::OnDemuxerOpened() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  client_->mediaSourceOpened(new WebMediaSourceImpl(
      chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_)));
}

void WebMediaPlayerImpl::OnAddTextTrack(
    const TextTrackConfig& config,
    const AddTextTrackDoneCB& done_cb) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  const WebInbandTextTrackImpl::Kind web_kind =
      static_cast<WebInbandTextTrackImpl::Kind>(config.kind());
  const blink::WebString web_label =
      blink::WebString::fromUTF8(config.label());
  const blink::WebString web_language =
      blink::WebString::fromUTF8(config.language());
  const blink::WebString web_id =
      blink::WebString::fromUTF8(config.id());

  scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track(
      new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id,
                                 text_track_index_++));

  scoped_ptr<TextTrack> text_track(new TextTrackImpl(
      main_task_runner_, client_, web_inband_text_track.Pass()));

  done_cb.Run(text_track.Pass());
}

void WebMediaPlayerImpl::DataSourceInitialized(bool success) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (!success) {
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  StartPipeline();
}

void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) {
  if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading)
    SetNetworkState(WebMediaPlayer::NetworkStateIdle);
  else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle)
    SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  media_log_->AddEvent(
      media_log_->CreateBooleanEvent(
          MediaLogEvent::NETWORK_ACTIVITY_SET,
          "is_downloading_data", is_downloading));
}

// TODO(xhwang): Move this to a factory class so that we can create different
// renderers.
scoped_ptr<Renderer> WebMediaPlayerImpl::CreateRenderer() {
  SetDecryptorReadyCB set_decryptor_ready_cb =
      encrypted_media_support_->CreateSetDecryptorReadyCB();

  // Create our audio decoders and renderer.
  ScopedVector<AudioDecoder> audio_decoders;

  audio_decoders.push_back(new media::FFmpegAudioDecoder(
      media_task_runner_, base::Bind(&LogMediaSourceError, media_log_)));
  audio_decoders.push_back(new media::OpusAudioDecoder(media_task_runner_));

  scoped_ptr<AudioRenderer> audio_renderer(
      new AudioRendererImpl(media_task_runner_,
                            audio_source_provider_.get(),
                            audio_decoders.Pass(),
                            set_decryptor_ready_cb,
                            audio_hardware_config_,
                            media_log_));

  // Create our video decoders and renderer.
  ScopedVector<VideoDecoder> video_decoders;

  if (gpu_factories_.get())
    video_decoders.push_back(new GpuVideoDecoder(gpu_factories_));

#if !defined(MEDIA_DISABLE_LIBVPX)
  video_decoders.push_back(new VpxVideoDecoder(media_task_runner_));
#endif  // !defined(MEDIA_DISABLE_LIBVPX)

  video_decoders.push_back(new FFmpegVideoDecoder(media_task_runner_));

  scoped_ptr<VideoRenderer> video_renderer(new VideoRendererImpl(
      media_task_runner_,
      video_decoders.Pass(),
      set_decryptor_ready_cb,
      base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)),
      true,
      media_log_));

  // Create renderer.
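  // RendererImpl ties the audio and video renderers created above to the
  // demuxer owned by this player (see StartPipeline()).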
  return scoped_ptr<Renderer>(new RendererImpl(
      media_task_runner_,
      demuxer_.get(),
      audio_renderer.Pass(),
      video_renderer.Pass()));
}

void WebMediaPlayerImpl::StartPipeline() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // Keep track of whether this is an MSE or non-MSE playback.
  UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback",
                        (load_type_ == LoadTypeMediaSource));

  LogCB mse_log_cb;
  Demuxer::NeedKeyCB need_key_cb =
      encrypted_media_support_->CreateNeedKeyCB();

  // Figure out which demuxer to use.
  if (load_type_ != LoadTypeMediaSource) {
    DCHECK(!chunk_demuxer_);
    DCHECK(data_source_);

    demuxer_.reset(new FFmpegDemuxer(
        media_task_runner_, data_source_.get(),
        need_key_cb,
        media_log_));
  } else {
    DCHECK(!chunk_demuxer_);
    DCHECK(!data_source_);

    mse_log_cb = base::Bind(&LogMediaSourceError, media_log_);

    chunk_demuxer_ = new ChunkDemuxer(
        BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened),
        need_key_cb,
        mse_log_cb,
        true);
    demuxer_.reset(chunk_demuxer_);
  }

  // ... and we're ready to go!
  seeking_ = true;
  pipeline_.Start(
      demuxer_.get(),
      CreateRenderer(),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineEnded),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineError),
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, false),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineMetadata),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineBufferingStateChanged),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDurationChanged),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnAddTextTrack));
}

void WebMediaPlayerImpl::SetNetworkState(WebMediaPlayer::NetworkState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  network_state_ = state;
  // Always notify to ensure client has the latest value.
  client_->networkStateChanged();
}

void WebMediaPlayerImpl::SetReadyState(WebMediaPlayer::ReadyState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (state == WebMediaPlayer::ReadyStateHaveEnoughData && data_source_ &&
      data_source_->assume_fully_buffered() &&
      network_state_ == WebMediaPlayer::NetworkStateLoading)
    SetNetworkState(WebMediaPlayer::NetworkStateLoaded);

  ready_state_ = state;
  // Always notify to ensure client has the latest value.
  client_->readyStateChanged();
}

blink::WebAudioSourceProvider* WebMediaPlayerImpl::audioSourceProvider() {
  return audio_source_provider_.get();
}

double WebMediaPlayerImpl::GetPipelineDuration() const {
  base::TimeDelta duration = pipeline_.GetMediaDuration();

  // Return positive infinity if the resource is unbounded.
  // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-media-duration
  if (duration == kInfiniteDuration())
    return std::numeric_limits<double>::infinity();

  return duration.InSecondsF();
}

void WebMediaPlayerImpl::OnDurationChanged() {
  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return;

  client_->durationChanged();
}

void WebMediaPlayerImpl::OnNaturalSizeChanged(gfx::Size size) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);
  TRACE_EVENT0("media", "WebMediaPlayerImpl::OnNaturalSizeChanged");

  media_log_->AddEvent(
      media_log_->CreateVideoSizeSetEvent(size.width(), size.height()));
  pipeline_metadata_.natural_size = size;

  client_->sizeChanged();
}

void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);

  opaque_ = opaque;
  if (video_weblayer_)
    video_weblayer_->setOpaque(opaque_);
}

void WebMediaPlayerImpl::FrameReady(
    const scoped_refptr<VideoFrame>& frame) {
  compositor_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&VideoFrameCompositor::UpdateCurrentFrame,
                 base::Unretained(compositor_),
                 frame));
}

static void GetCurrentFrameAndSignal(
    VideoFrameCompositor* compositor,
    scoped_refptr<VideoFrame>* video_frame_out,
    base::WaitableEvent* event) {
  TRACE_EVENT0("media", "GetCurrentFrameAndSignal");
  *video_frame_out = compositor->GetCurrentFrame();
  event->Signal();
}

scoped_refptr<VideoFrame>
WebMediaPlayerImpl::GetCurrentFrameFromCompositor() {
  TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
  if (compositor_task_runner_->BelongsToCurrentThread())
    return compositor_->GetCurrentFrame();

  // Use a posted task and waitable event instead of a lock; otherwise
  // WebGL/Canvas can see different content than what the compositor is seeing.
  scoped_refptr<VideoFrame> video_frame;
  base::WaitableEvent event(false, false);
  compositor_task_runner_->PostTask(FROM_HERE,
                                    base::Bind(&GetCurrentFrameAndSignal,
                                               base::Unretained(compositor_),
                                               &video_frame,
                                               &event));
  event.Wait();
  return video_frame;
}

}  // namespace media