/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkCoverageDelta_DEFINED
#define SkCoverageDelta_DEFINED

#include "SkArenaAlloc.h"
#include "SkFixed.h"
#include "SkMask.h"
#include "SkTSort.h"
#include "SkUtils.h"

// Future todo: consider making fX and fDelta 16 bits wide to speed things up a little.
struct SkCoverageDelta {
    int     fX;     // x coordinate; y is implied by the row in SkCoverageDeltaList
    SkFixed fDelta; // the amount by which the alpha changes at this x

    // Sort according to fX
    bool operator<(const SkCoverageDelta& other) const {
        return fX < other.fX;
    }
};

// All the arguments needed for SkBlitter::blitAntiRect
struct SkAntiRect {
    int     fX;
    int     fY;
    int     fWidth;
    int     fHeight;
    SkAlpha fLeftAlpha;
    SkAlpha fRightAlpha;
};

// A list of SkCoverageDelta with y ranging from top() to bottom().
// For each row y, there are count(y) deltas.
// You can ask whether a row is sorted with sorted(y), and you can sort it with sort(y).
// Once sorted, getDelta(y, i) returns the i-th leftmost delta on row y.
class SkCoverageDeltaList {
public:
    // We can initially store INIT_ROW_SIZE deltas per row (i.e., per y-scanline).
#ifdef SK_BUILD_FOR_GOOGLE3
    static constexpr int INIT_ROW_SIZE = 8; // google3 has a 16k stack limit, so keep this small
#else
    static constexpr int INIT_ROW_SIZE = 32;
#endif

    SkCoverageDeltaList(SkArenaAlloc* alloc, const SkIRect& bounds, bool forceRLE);

    int  top() const { return fBounds.fTop; }
    int  bottom() const { return fBounds.fBottom; }
    int  left() const { return fBounds.fLeft; }
    int  right() const { return fBounds.fRight; }
    bool forceRLE() const { return fForceRLE; }
    int  count(int y) const { this->checkY(y); return fCounts[y]; }
    bool sorted(int y) const { this->checkY(y); return fSorted[y]; }

    SK_ALWAYS_INLINE void addDelta(int x, int y, SkFixed delta) { this->push_back(y, {x, delta}); }
    SK_ALWAYS_INLINE const SkCoverageDelta& getDelta(int y, int i) const {
        this->checkY(y);
        SkASSERT(i < fCounts[y]);
        return fRows[y][i];
    }

    // It might be better to sort right before blitting so the row's memory is hot in the cache.
    void sort(int y) {
        this->checkY(y);
        if (!fSorted[y]) {
            // SkTQSort sorts the inclusive range [first, last], hence the "- 1".
            SkTQSort(fRows[y], fRows[y] + fCounts[y] - 1);
            fSorted[y] = true;
        }
    }

    const SkAntiRect& getAntiRect() const { return fAntiRect; }
    void setAntiRect(int x, int y, int width, int height,
            SkAlpha leftAlpha, SkAlpha rightAlpha) {
        fAntiRect = {x, y, width, height, leftAlpha, rightAlpha};
    }

private:
    SkArenaAlloc*               fAlloc;
    SkCoverageDelta**           fRows;
    bool*                       fSorted;
    int*                        fCounts;
    int*                        fMaxCounts;
    SkIRect                     fBounds;
    SkAntiRect                  fAntiRect;
    bool                        fForceRLE;

    void checkY(int y) const { SkASSERT(y >= fBounds.fTop && y < fBounds.fBottom); }

    SK_ALWAYS_INLINE void push_back(int y, const SkCoverageDelta& delta) {
        this->checkY(y);
        if (fCounts[y] == fMaxCounts[y]) {
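            // Grow the row by 4x. The old array is not freed individually; it simply stays in
            // the arena until the arena itself is destroyed.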
            fMaxCounts[y] *= 4;
            SkCoverageDelta* newRow = fAlloc->makeArrayDefault<SkCoverageDelta>(fMaxCounts[y]);
            memcpy(newRow, fRows[y], sizeof(SkCoverageDelta) * fCounts[y]);
            fRows[y] = newRow;
        }
        SkASSERT(fCounts[y] < fMaxCounts[y]);
        fRows[y][fCounts[y]++] = delta;
        fSorted[y] = fSorted[y] && (fCounts[y] == 1 || delta.fX >= fRows[y][fCounts[y] - 2].fX);
    }
};
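
// A minimal, hypothetical usage sketch (not part of this header): deltas are accumulated per
// scanline while walking the path's edges, each row is sorted, and a running (prefix) sum of
// the deltas gives the coverage in effect from one delta's fX up to the next delta's fX.
//
//   void sketchWalkDeltas(SkCoverageDeltaList& deltas) {
//       for (int y = deltas.top(); y < deltas.bottom(); ++y) {
//           deltas.sort(y);
//           SkFixed coverage = 0;
//           for (int i = 0; i < deltas.count(y); ++i) {
//               const SkCoverageDelta& d = deltas.getDelta(y, i);
//               coverage += d.fDelta; // coverage in effect from d.fX to the next delta's fX
//               // ... convert coverage to alpha (see CoverageToAlpha below) and blit ...
//           }
//       }
//   }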

class SkCoverageDeltaMask {
public:
    // 3 for precision error, 1 for boundary delta (e.g., -SK_Fixed1 at fBounds.fRight + 1)
    static constexpr int PADDING        = 4;

    static constexpr int SIMD_WIDTH     = 8;
    static constexpr int SUITABLE_WIDTH = 32;
#ifdef SK_BUILD_FOR_GOOGLE3
    static constexpr int MAX_MASK_SIZE  = 1024; // G3 has a 16k stack limit (per -fstack-usage)
#else
    static constexpr int MAX_MASK_SIZE  = 2048;
#endif
    static constexpr int MAX_SIZE       = MAX_MASK_SIZE * (sizeof(SkFixed) + sizeof(SkAlpha));

    // Expand the width by PADDING on both sides, then round it up to a multiple of SIMD_WIDTH.
    static int  ExpandWidth(int width);
    static bool CanHandle(const SkIRect& bounds);   // whether bounds fits into MAX_MASK_SIZE
    static bool Suitable(const SkIRect& bounds);    // CanHandle(bounds) && width <= SUITABLE_WIDTH
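    // For example (assuming the expansion described above): a 30-pixel-wide bounds would expand
    // to 30 + 2 * PADDING = 38, rounded up to 40, the next multiple of SIMD_WIDTH.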

    SkCoverageDeltaMask(SkArenaAlloc* alloc, const SkIRect& bounds);

    int              top()       const { return fBounds.fTop; }
    int              bottom()    const { return fBounds.fBottom; }
    SkAlpha*         getMask()         { return fMask; }
    const SkIRect&   getBounds() const { return fBounds; }

    SK_ALWAYS_INLINE void addDelta (int x, int y, SkFixed delta) { this->delta(x, y) += delta; }
    SK_ALWAYS_INLINE SkFixed& delta (int x, int y) {
        this->checkX(x);
        this->checkY(y);
        return fDeltas[this->index(x, y)];
    }

    void setAntiRect(int x, int y, int width, int height,
                            SkAlpha leftAlpha, SkAlpha rightAlpha) {
        fAntiRect = {x, y, width, height, leftAlpha, rightAlpha};
    }

    SkMask prepareSkMask() {
        SkMask mask;
        mask.fImage     = fMask;
        mask.fBounds    = fBounds;
        mask.fRowBytes  = fBounds.width();
        mask.fFormat    = SkMask::kA8_Format;
        return mask;
    }

    void convertCoverageToAlpha(bool isEvenOdd, bool isInverse, bool isConvex);

private:
    SkIRect     fBounds;
    SkFixed*    fDeltaStorage;
    SkFixed*    fDeltas;
    SkAlpha*    fMask;
    int         fExpandedWidth;
    SkAntiRect  fAntiRect;

    SK_ALWAYS_INLINE int index(int x, int y) const { return y * fExpandedWidth + x; }

    void checkY(int y) const { SkASSERT(y >= fBounds.fTop && y < fBounds.fBottom); }
    void checkX(int x) const {
        SkASSERT(x >= fBounds.fLeft - PADDING && x < fBounds.fRight + PADDING);
    }
};
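
// A minimal, hypothetical usage sketch (not part of this header): accumulate per-pixel deltas,
// convert the accumulated coverage to alpha in place, then hand the result to a blitter as an
// A8 SkMask. The blitter, clip choice, and flags below are assumptions for illustration only.
//
//   void sketchBlitMask(SkArenaAlloc* alloc, const SkIRect& bounds, SkBlitter* blitter) {
//       if (!SkCoverageDeltaMask::CanHandle(bounds)) {
//           return; // too large for the mask path; a caller would fall back to the list path
//       }
//       SkCoverageDeltaMask mask(alloc, bounds);
//       // ... call mask.addDelta(x, y, delta) while scanning the path's edges ...
//       mask.convertCoverageToAlpha(false /*isEvenOdd*/, false /*isInverse*/, false /*isConvex*/);
//       blitter->blitMask(mask.prepareSkMask(), bounds);
//   }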

static SK_ALWAYS_INLINE SkAlpha CoverageToAlpha(SkFixed coverage, bool isEvenOdd, bool isInverse) {
    SkAlpha result;
    if (isEvenOdd) {
        SkFixed mod17 = coverage & 0x1ffff;
        SkFixed mod16 = coverage & 0xffff;
        result = SkTPin(SkAbs32((mod16 << 1) - mod17) >> 8, 0, 255);
    } else {
        result = SkTPin(SkAbs32(coverage) >> 8, 0, 255);
    }
    return isInverse ? 255 - result : result;
}
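
// Worked example of the even-odd branch above (SkFixed, 0x10000 == 1.0; diff = (mod16 << 1) - mod17):
//   coverage 0x08000 (0.5): mod17 = mod16 = 0x8000, diff =  0x8000           -> alpha 128
//   coverage 0x10000 (1.0): mod17 = 0x10000, mod16 = 0, diff = -0x10000      -> alpha 255 (pinned)
//   coverage 0x18000 (1.5): mod17 = 0x18000, mod16 = 0x8000, diff = -0x8000  -> alpha 128
//   coverage 0x20000 (2.0): mod17 = mod16 = 0                                -> alpha 0
// That is, alpha follows a triangle wave with period 2 (the even-odd fill rule); the other branch
// simply clamps the winding-weighted coverage to [0, 255].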

struct SkDAARecord {
    enum class Type {
        kToBeComputed,
        kMask,
        kList,
        kEmpty
    } fType;

    SkMask               fMask;
    SkCoverageDeltaList* fList;
    SkArenaAlloc*        fAlloc;

    SkDAARecord(SkArenaAlloc* alloc) : fType(Type::kToBeComputed), fAlloc(alloc) {}

    // When the scan converter returns early (e.g., the path is completely outside the clip), we
    // set the type to empty to signal that the record has been computed and is empty. This is
    // only needed in debug builds, where we assert that the type is no longer kToBeComputed after
    // init-once.
    void setEmpty() { fType = Type::kEmpty; }
    static inline void SetEmpty(SkDAARecord* record) { // record may be nullptr
#ifdef SK_DEBUG
        // If type != kToBeComputed, then we're in the draw phase and we shouldn't set it to empty
        // because being empty in one tile does not imply emptiness in other tiles.
        if (record && record->fType == Type::kToBeComputed) {
            record->setEmpty();
        }
#endif
    }
};
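
// A hypothetical dispatch sketch (illustrative only): once a record has been computed, the draw
// phase can branch on fType to decide how to blit.
//
//   void sketchDraw(const SkDAARecord& record) {
//       switch (record.fType) {
//           case SkDAARecord::Type::kMask:  /* blit record.fMask as an A8 mask */       break;
//           case SkDAARecord::Type::kList:  /* walk record.fList row by row and blit */ break;
//           case SkDAARecord::Type::kEmpty: /* nothing to draw */                       break;
//           case SkDAARecord::Type::kToBeComputed: SkASSERT(false); break; // not expected here
//       }
//   }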

template<typename T>
static SK_ALWAYS_INLINE T CoverageToAlpha(const T& coverage, bool isEvenOdd, bool isInverse) {
    T t0(0), t255(255);
    T result;
    if (isEvenOdd) {
        T mod17 = coverage & 0x1ffff;
        T mod16 = coverage & 0xffff;
        result = ((mod16 << 1) - mod17).abs() >> 8;
    } else {
        result = coverage.abs() >> 8;
    }
    result = T::Min(result, t255);
    result = T::Max(result, t0);
    return isInverse ? 255 - result : result;
}

// For convex paths (including inverse mode), the coverage is guaranteed to lie in
// [-SK_Fixed1, SK_Fixed1], so we can skip the isEvenOdd handling and the SkTPin clamp.
static SK_ALWAYS_INLINE SkAlpha ConvexCoverageToAlpha(SkFixed coverage, bool isInverse) {
    SkASSERT(coverage >= -SK_Fixed1 && coverage <= SK_Fixed1);
    int result = SkAbs32(coverage) >> 8;
    result -= (result >> 8); // 256 to 255
    return isInverse ? 255 - result : result;
}
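
// Worked example of the "256 to 255" fold above: coverage == SK_Fixed1 gives
// SkAbs32(coverage) >> 8 == 256, and 256 - (256 >> 8) == 255. Any smaller coverage yields a
// result below 256, where (result >> 8) == 0 and the value is left unchanged.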

template<typename T>
static SK_ALWAYS_INLINE T ConvexCoverageToAlpha(const T& coverage, bool isInverse) {
    // allTrue is not implemented
    // SkASSERT((coverage >= 0).allTrue() && (coverage <= SK_Fixed1).allTrue());
    T result = coverage.abs() >> 8;
    result -= (result >> 8); // 256 to 255
    return isInverse ? 255 - result : result;
}

#endif