// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/resources/picture_pile.h"

#include <algorithm>
#include <limits>
#include <vector>

#include "cc/base/region.h"
#include "cc/debug/rendering_stats_instrumentation.h"
#include "cc/resources/picture_pile_impl.h"
#include "cc/resources/raster_worker_pool.h"
#include "cc/resources/tile_priority.h"

namespace {
// Layout-pixel buffer around the visible layer rect to record.  Any base
// picture that intersects the visible layer rect expanded by this distance
// will be recorded.
const int kPixelDistanceToRecord = 8000;
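// For example (illustrative numbers only): a 1000x800 visible layer rect,
// expanded by kPixelDistanceToRecord on every side, yields a 17000x16800
// interest rect in which recording may happen.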

// TODO(humper): The density threshold here is somewhat arbitrary; we need a
// way to set this from the command line so we can write a benchmark script
// and find a sweet spot.
const float kDensityThreshold = 0.5f;
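// Worked example (illustrative only): two 100x100 invalid tiles at (0, 0)
// and (0, 200) have a union bounding box of 100x300 = 30000, so the proposed
// density is (10000 + 10000) / 30000 ~= 0.67 >= 0.5 and they are merged into
// one record rect.  If the second tile were at (0, 500) instead, the density
// would be 20000 / 60000 ~= 0.33 < 0.5 and a new cluster would be started.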

bool rect_sort_y(const gfx::Rect& r1, const gfx::Rect& r2) {
  return r1.y() < r2.y() || (r1.y() == r2.y() && r1.x() < r2.x());
}

bool rect_sort_x(const gfx::Rect& r1, const gfx::Rect& r2) {
  return r1.x() < r2.x() || (r1.x() == r2.x() && r1.y() < r2.y());
}

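// Greedily merges the (already sorted) invalid |tiles| into a smaller set of
// record rects, appended to |clustered_rects|.  A tile joins the current
// cluster only if the merged rect stays at or above kDensityThreshold.
// Returns the overall density achieved: total invalid area divided by total
// recorded area (1.0 means nothing beyond the invalid tiles gets recorded).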
float do_clustering(const std::vector<gfx::Rect>& tiles,
                    std::vector<gfx::Rect>* clustered_rects) {
  // These variables track the record area and invalid area
  // for the entire clustering
  int total_record_area = 0;
  int total_invalid_area = 0;

  // These variables track the record area and invalid area
  // for the current cluster being constructed.
  gfx::Rect cur_record_rect;
  int cluster_record_area = 0, cluster_invalid_area = 0;

  for (std::vector<gfx::Rect>::const_iterator it = tiles.begin();
       it != tiles.end();
       ++it) {
    gfx::Rect invalid_tile = *it;

    // For each tile, we consider adding the invalid tile to the
    // current record rectangle.  Only add it if the amount of empty
    // space created is below a density threshold.
    int tile_area = invalid_tile.width() * invalid_tile.height();

    gfx::Rect proposed_union = cur_record_rect;
    proposed_union.Union(invalid_tile);
    int proposed_area = proposed_union.width() * proposed_union.height();
    float proposed_density =
      static_cast<float>(cluster_invalid_area + tile_area) /
      static_cast<float>(proposed_area);

    if (proposed_density >= kDensityThreshold) {
      // It's okay to add this invalid tile to the
      // current recording rectangle.
      cur_record_rect = proposed_union;
      cluster_record_area = proposed_area;
      cluster_invalid_area += tile_area;
      total_invalid_area += tile_area;
    } else {
      // Adding this invalid tile to the current recording rectangle
      // would exceed our badness threshold, so put the current rectangle
      // in the list of recording rects, and start a new one.
      clustered_rects->push_back(cur_record_rect);
      total_record_area += cluster_record_area;
      cur_record_rect = invalid_tile;
      cluster_invalid_area = tile_area;
      cluster_record_area = tile_area;
    }
  }

  DCHECK(!cur_record_rect.IsEmpty());
  clustered_rects->push_back(cur_record_rect);
  total_record_area += cluster_record_area;

  DCHECK_NE(total_record_area, 0);

  return static_cast<float>(total_invalid_area) /
         static_cast<float>(total_record_area);
}

float ClusterTiles(const std::vector<gfx::Rect>& invalid_tiles,
                   std::vector<gfx::Rect>* record_rects) {
  TRACE_EVENT1("cc", "ClusterTiles",
               "count",
               invalid_tiles.size());

  if (invalid_tiles.size() <= 1) {
    // Quickly handle the common special case of a single invalidation, and
    // the less common case of no tiles passed in at all.
    *record_rects = invalid_tiles;
    return 1;
  }

  // Sort the invalid tiles by y coordinate.
  std::vector<gfx::Rect> invalid_tiles_vertical = invalid_tiles;
  std::sort(invalid_tiles_vertical.begin(),
            invalid_tiles_vertical.end(),
            rect_sort_y);

  float vertical_density;
  std::vector<gfx::Rect> vertical_clustering;
  vertical_density = do_clustering(invalid_tiles_vertical,
                                   &vertical_clustering);

  // Now try again with a horizontal sort and see which one is best.
  // TODO(humper): Heuristics for skipping this step?
  std::vector<gfx::Rect> invalid_tiles_horizontal = invalid_tiles;
  std::sort(invalid_tiles_horizontal.begin(),
            invalid_tiles_horizontal.end(),
            rect_sort_x);

  float horizontal_density;
  std::vector<gfx::Rect> horizontal_clustering;
  horizontal_density = do_clustering(invalid_tiles_horizontal,
                                     &horizontal_clustering);

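  // Keep whichever ordering produced the denser clustering, i.e. the one
  // that wastes the least recorded area.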
  if (vertical_density < horizontal_density) {
    *record_rects = horizontal_clustering;
    return horizontal_density;
  }

  *record_rects = vertical_clustering;
  return vertical_density;
}

}  // namespace

namespace cc {

PicturePile::PicturePile() : is_suitable_for_gpu_rasterization_(true) {}

PicturePile::~PicturePile() {
}

bool PicturePile::UpdateAndExpandInvalidation(
    ContentLayerClient* painter,
    Region* invalidation,
    SkColor background_color,
    bool contents_opaque,
    bool contents_fill_bounds_completely,
    const gfx::Rect& visible_layer_rect,
    int frame_number,
    Picture::RecordingMode recording_mode,
    RenderingStatsInstrumentation* stats_instrumentation) {
  background_color_ = background_color;
  contents_opaque_ = contents_opaque;
  contents_fill_bounds_completely_ = contents_fill_bounds_completely;

  gfx::Rect interest_rect = visible_layer_rect;
  interest_rect.Inset(
      -kPixelDistanceToRecord,
      -kPixelDistanceToRecord,
      -kPixelDistanceToRecord,
      -kPixelDistanceToRecord);
  recorded_viewport_ = interest_rect;
  recorded_viewport_.Intersect(tiling_rect());

  gfx::Rect interest_rect_over_tiles =
      tiling_.ExpandRectToTileBounds(interest_rect);

  Region invalidation_expanded_to_full_tiles;

  bool invalidated = false;
  for (Region::Iterator i(*invalidation); i.has_rect(); i.next()) {
    gfx::Rect invalid_rect = i.rect();
    // Split this inflated invalidation across tile boundaries and apply it
    // to all tiles that it touches.
    bool include_borders = true;
    for (TilingData::Iterator iter(&tiling_, invalid_rect, include_borders);
         iter;
         ++iter) {
      const PictureMapKey& key = iter.index();

      PictureMap::iterator picture_it = picture_map_.find(key);
      if (picture_it == picture_map_.end())
        continue;

      // Inform the grid cell that it has been invalidated in this frame.
      invalidated = picture_it->second.Invalidate(frame_number) || invalidated;
    }

    // Expand invalidation that is outside tiles that intersect the interest
    // rect. These tiles are no longer valid and should be considered fully
    // invalid, so that we know not to keep around raster tiles that intersect
    // these recording tiles.
    gfx::Rect invalid_rect_outside_interest_rect_tiles = invalid_rect;
    // TODO(danakj): We should have a Rect-subtract-Rect-to-2-rects operator
    // instead of using Rect::Subtract which gives you the bounding box of the
    // subtraction.
    invalid_rect_outside_interest_rect_tiles.Subtract(interest_rect_over_tiles);
    invalidation_expanded_to_full_tiles.Union(tiling_.ExpandRectToTileBounds(
        invalid_rect_outside_interest_rect_tiles));
  }

  invalidation->Union(invalidation_expanded_to_full_tiles);

  // Make a list of all invalid tiles; we will attempt to
  // cluster these into multiple invalidation regions.
  std::vector<gfx::Rect> invalid_tiles;
  bool include_borders = true;
  for (TilingData::Iterator it(&tiling_, interest_rect, include_borders); it;
       ++it) {
    const PictureMapKey& key = it.index();
    PictureInfo& info = picture_map_[key];

    gfx::Rect rect = PaddedRect(key);
    int distance_to_visible =
        rect.ManhattanInternalDistance(visible_layer_rect);

    if (info.NeedsRecording(frame_number, distance_to_visible)) {
      gfx::Rect tile = tiling_.TileBounds(key.first, key.second);
      invalid_tiles.push_back(tile);
    } else if (!info.GetPicture()) {
      if (recorded_viewport_.Intersects(rect)) {
        // Recorded viewport is just an optimization for a fully recorded
        // interest rect.  In this case, a tile in that rect has declined
        // to be recorded (probably due to frequent invalidations).
        // TODO(enne): Shrink the recorded_viewport_ rather than clearing.
        recorded_viewport_ = gfx::Rect();
      }

      // If a tile in the interest rect is not recorded, the entire tile needs
      // to be considered invalid, so that we know not to keep around raster
      // tiles that intersect this recording tile.
      invalidation->Union(tiling_.TileBounds(it.index_x(), it.index_y()));
    }
  }

  std::vector<gfx::Rect> record_rects;
  ClusterTiles(invalid_tiles, &record_rects);

  if (record_rects.empty())
    return invalidated;

  for (std::vector<gfx::Rect>::iterator it = record_rects.begin();
       it != record_rects.end();
       ++it) {
    gfx::Rect record_rect = *it;
    record_rect = PadRect(record_rect);

    int repeat_count = std::max(1, slow_down_raster_scale_factor_for_debug_);
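    // When slow-down-raster debugging is enabled, the same rect is recorded
    // repeat_count times below and only the best (minimum) duration is
    // reported to the stats instrumentation.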
    scoped_refptr<Picture> picture;
    int num_raster_threads = RasterWorkerPool::GetNumRasterThreads();

    // Note: Currently, gathering of pixel refs when using a single
    // raster thread doesn't provide any benefit. This might change
    // in the future but we avoid it for now to reduce the cost of
    // Picture::Create.
    bool gather_pixel_refs = num_raster_threads > 1;

    {
      base::TimeDelta best_duration = base::TimeDelta::Max();
      for (int i = 0; i < repeat_count; i++) {
        base::TimeTicks start_time = stats_instrumentation->StartRecording();
        picture = Picture::Create(record_rect,
                                  painter,
                                  tile_grid_info_,
                                  gather_pixel_refs,
                                  num_raster_threads,
                                  recording_mode);
        // Note the '&=' with the previous is-suitable state.
        // This means that once a picture-pile becomes unsuitable for gpu
        // rasterization due to some content, it will continue to be unsuitable
        // even if that content is replaced by gpu-friendly content.
        // This is an optimization to avoid iterating through all pictures in
        // the pile after each invalidation.
        is_suitable_for_gpu_rasterization_ &=
            picture->IsSuitableForGpuRasterization();
        base::TimeDelta duration =
            stats_instrumentation->EndRecording(start_time);
        best_duration = std::min(duration, best_duration);
      }
      int recorded_pixel_count =
          picture->LayerRect().width() * picture->LayerRect().height();
      stats_instrumentation->AddRecord(best_duration, recorded_pixel_count);
    }

    bool found_tile_for_recorded_picture = false;

    bool include_borders = true;
    for (TilingData::Iterator it(&tiling_, record_rect, include_borders); it;
         ++it) {
      const PictureMapKey& key = it.index();
      gfx::Rect tile = PaddedRect(key);
      if (record_rect.Contains(tile)) {
        PictureInfo& info = picture_map_[key];
        info.SetPicture(picture);
        found_tile_for_recorded_picture = true;
      }
    }
    DCHECK(found_tile_for_recorded_picture);
  }

  has_any_recordings_ = true;
  DCHECK(CanRasterSlowTileCheck(recorded_viewport_));
  return true;
}

}  // namespace cc