/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work.

#include "SkTypes.h"

#if SK_SUPPORT_GPU && defined(SK_VULKAN)

#include "GrContextPriv.h"
#include "GrContextFactory.h"
#include "GrTest.h"
#include "Test.h"
#include "vk/GrVkGpu.h"

using sk_gpu_test::GrContextFactory;

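// Drives GrVkSubHeap's suballocator directly on a single 64K block: full
// allocation and free, frees that merge with preceding and/or following free
// blocks, allocation into smallest and exact-size free blocks, fragmentation,
// and unaligned request sizes.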
void subheap_test(skiatest::Reporter* reporter, GrContext* context) {
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());

    // memtype doesn't matter, we're just testing the suballocation algorithm so we'll use 0
    GrVkSubHeap heap(gpu, 0, 0, 64 * 1024, 32);
    GrVkAlloc alloc0, alloc1, alloc2, alloc3;
    // test full allocation and free
    REPORTER_ASSERT(reporter, heap.alloc(64 * 1024, &alloc0));
    REPORTER_ASSERT(reporter, alloc0.fOffset == 0);
    REPORTER_ASSERT(reporter, alloc0.fSize == 64 * 1024);
    REPORTER_ASSERT(reporter, heap.freeSize() == 0 && heap.largestBlockSize() == 0);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);

    // now let's suballoc some memory
    REPORTER_ASSERT(reporter, heap.alloc(16 * 1024, &alloc0));
    REPORTER_ASSERT(reporter, heap.alloc(23 * 1024, &alloc1));
    REPORTER_ASSERT(reporter, heap.alloc(18 * 1024, &alloc2));
    REPORTER_ASSERT(reporter, heap.freeSize() == 7 * 1024 && heap.largestBlockSize() == 7 * 1024);
    // free lone block
    heap.free(alloc1);
    REPORTER_ASSERT(reporter, heap.freeSize() == 30 * 1024 && heap.largestBlockSize() == 23 * 1024);
    // allocate into smallest free block
    REPORTER_ASSERT(reporter, heap.alloc(6 * 1024, &alloc3));
    REPORTER_ASSERT(reporter, heap.freeSize() == 24 * 1024 && heap.largestBlockSize() == 23 * 1024);
    // allocate into exact size free block
    REPORTER_ASSERT(reporter, heap.alloc(23 * 1024, &alloc1));
    REPORTER_ASSERT(reporter, heap.freeSize() == 1 * 1024 && heap.largestBlockSize() == 1 * 1024);
    // free lone block
    heap.free(alloc2);
    REPORTER_ASSERT(reporter, heap.freeSize() == 19 * 1024 && heap.largestBlockSize() == 18 * 1024);
    // free and merge with both preceding and following blocks
    heap.free(alloc3);
    REPORTER_ASSERT(reporter, heap.freeSize() == 25 * 1024 && heap.largestBlockSize() == 25 * 1024);
    // free and merge with following block
    heap.free(alloc1);
    REPORTER_ASSERT(reporter, heap.freeSize() == 48 * 1024 && heap.largestBlockSize() == 48 * 1024);
    // free starting block and merge with following
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);

    // realloc
    REPORTER_ASSERT(reporter, heap.alloc(4 * 1024, &alloc0));
    REPORTER_ASSERT(reporter, heap.alloc(35 * 1024, &alloc1));
    REPORTER_ASSERT(reporter, heap.alloc(10 * 1024, &alloc2));
    REPORTER_ASSERT(reporter, heap.freeSize() == 15 * 1024 && heap.largestBlockSize() == 15 * 1024);
    // free starting block and merge with following
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.freeSize() == 19 * 1024 && heap.largestBlockSize() == 15 * 1024);
    // free block and merge with preceding
    heap.free(alloc1);
    REPORTER_ASSERT(reporter, heap.freeSize() == 54 * 1024 && heap.largestBlockSize() == 39 * 1024);
    // free block and merge with preceding and following
    heap.free(alloc2);
    REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);

    // fragment
    REPORTER_ASSERT(reporter, heap.alloc(19 * 1024, &alloc0));
    REPORTER_ASSERT(reporter, heap.alloc(5 * 1024, &alloc1));
    REPORTER_ASSERT(reporter, heap.alloc(15 * 1024, &alloc2));
    REPORTER_ASSERT(reporter, heap.alloc(3 * 1024, &alloc3));
    REPORTER_ASSERT(reporter, heap.freeSize() == 22 * 1024 && heap.largestBlockSize() == 22 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.freeSize() == 41 * 1024 && heap.largestBlockSize() == 22 * 1024);
    heap.free(alloc2);
    REPORTER_ASSERT(reporter, heap.freeSize() == 56 * 1024 && heap.largestBlockSize() == 22 * 1024);
    REPORTER_ASSERT(reporter, !heap.alloc(40 * 1024, &alloc0));
    heap.free(alloc3);
    REPORTER_ASSERT(reporter, heap.freeSize() == 59 * 1024 && heap.largestBlockSize() == 40 * 1024);
    REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, &alloc0));
    REPORTER_ASSERT(reporter, heap.freeSize() == 19 * 1024 && heap.largestBlockSize() == 19 * 1024);
    heap.free(alloc1);
    REPORTER_ASSERT(reporter, heap.freeSize() == 24 * 1024 && heap.largestBlockSize() == 24 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);

    // unaligned sizes
    REPORTER_ASSERT(reporter, heap.alloc(19 * 1024 - 31, &alloc0));
    REPORTER_ASSERT(reporter, heap.alloc(5 * 1024 - 5, &alloc1));
    REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 19, &alloc2));
    REPORTER_ASSERT(reporter, heap.alloc(3 * 1024 - 3, &alloc3));
    REPORTER_ASSERT(reporter, heap.freeSize() == 22 * 1024 && heap.largestBlockSize() == 22 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.freeSize() == 41 * 1024 && heap.largestBlockSize() == 22 * 1024);
    heap.free(alloc2);
    REPORTER_ASSERT(reporter, heap.freeSize() == 56 * 1024 && heap.largestBlockSize() == 22 * 1024);
    REPORTER_ASSERT(reporter, !heap.alloc(40 * 1024, &alloc0));
    heap.free(alloc3);
    REPORTER_ASSERT(reporter, heap.freeSize() == 59 * 1024 && heap.largestBlockSize() == 40 * 1024);
    REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, &alloc0));
    REPORTER_ASSERT(reporter, heap.freeSize() == 19 * 1024 && heap.largestBlockSize() == 19 * 1024);
    heap.free(alloc1);
    REPORTER_ASSERT(reporter, heap.freeSize() == 24 * 1024 && heap.largestBlockSize() == 24 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);
}

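// Drives GrVkHeap with the kSubAlloc_Strategy: fragmented allocations that force
// the heap to add another 64K subheap, allocations that fit into existing
// subheaps, a request larger than the subheap size, and requests with a
// different memory type or alignment, which should each get their own subheap.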
void suballoc_test(skiatest::Reporter* reporter, GrContext* context) {
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());

    // memtype/heap index don't matter, we're just testing the allocation algorithm so we'll use 0
    GrVkHeap heap(gpu, GrVkHeap::kSubAlloc_Strategy, 64 * 1024);
    GrVkAlloc alloc0, alloc1, alloc2, alloc3;
    const VkDeviceSize kAlignment = 16;
    const uint32_t kMemType = 0;
    const uint32_t kHeapIndex = 0;

    REPORTER_ASSERT(reporter, heap.allocSize() == 0 && heap.usedSize() == 0);

    // fragment allocations so we need to grow the heap
    REPORTER_ASSERT(reporter, heap.alloc(19 * 1024 - 3, kAlignment, kMemType, kHeapIndex, &alloc0));
    REPORTER_ASSERT(reporter, heap.alloc(5 * 1024 - 9, kAlignment, kMemType, kHeapIndex, &alloc1));
    REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 15, kAlignment, kMemType, kHeapIndex, &alloc2));
    REPORTER_ASSERT(reporter, heap.alloc(3 * 1024 - 6, kAlignment, kMemType, kHeapIndex, &alloc3));
    REPORTER_ASSERT(reporter, heap.allocSize() == 64 * 1024 && heap.usedSize() == 42 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.allocSize() == 64 * 1024 && heap.usedSize() == 23 * 1024);
    heap.free(alloc2);
    REPORTER_ASSERT(reporter, heap.allocSize() == 64 * 1024 && heap.usedSize() == 8 * 1024);
    // we expect the heap to grow here
    REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 48 * 1024);
    heap.free(alloc3);
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 45 * 1024);
    // heap should not grow here (first subheap has exactly enough room)
    REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc3));
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 85 * 1024);
    // heap should not grow here (second subheap has room)
    REPORTER_ASSERT(reporter, heap.alloc(22 * 1024, kAlignment, kMemType, kHeapIndex, &alloc2));
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 107 * 1024);
    heap.free(alloc1);
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 102 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 62 * 1024);
    heap.free(alloc2);
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 40 * 1024);
    heap.free(alloc3);
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 0 * 1024);
    // heap should not grow here (allocating more than subheap size)
    REPORTER_ASSERT(reporter, heap.alloc(128 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 0 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
    REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 24 * 1024);
    // heap should alloc a new subheap because the memory type is different
    REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType + 1, kHeapIndex, &alloc1));
    REPORTER_ASSERT(reporter, heap.allocSize() == 192 * 1024 && heap.usedSize() == 48 * 1024);
    // heap should alloc a new subheap because the alignment is different
    REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, 128, kMemType, kHeapIndex, &alloc2));
    REPORTER_ASSERT(reporter, heap.allocSize() == 256 * 1024 && heap.usedSize() == 72 * 1024);
    heap.free(alloc2);
    heap.free(alloc0);
    heap.free(alloc1);
    REPORTER_ASSERT(reporter, heap.allocSize() == 256 * 1024 && heap.usedSize() == 0 * 1024);
}

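// Drives GrVkHeap with the kSingleAlloc_Strategy. As the assertions below
// suggest, each request appears to get its own subheap sized to the allocation;
// freed subheaps can then be reused by later requests that fit, while a
// different memory type or alignment forces a new subheap.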
void singlealloc_test(skiatest::Reporter* reporter, GrContext* context) {
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());

    // memtype/heap index don't matter, we're just testing the allocation algorithm so we'll use 0
    GrVkHeap heap(gpu, GrVkHeap::kSingleAlloc_Strategy, 64 * 1024);
    GrVkAlloc alloc0, alloc1, alloc2, alloc3;
    const VkDeviceSize kAlignment = 64;
    const uint32_t kMemType = 0;
    const uint32_t kHeapIndex = 0;

    REPORTER_ASSERT(reporter, heap.allocSize() == 0 && heap.usedSize() == 0);

    // make a few allocations
    REPORTER_ASSERT(reporter, heap.alloc(49 * 1024 - 3, kAlignment, kMemType, kHeapIndex, &alloc0));
    REPORTER_ASSERT(reporter, heap.alloc(5 * 1024 - 37, kAlignment, kMemType, kHeapIndex, &alloc1));
    REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 11, kAlignment, kMemType, kHeapIndex, &alloc2));
    REPORTER_ASSERT(reporter, heap.alloc(3 * 1024 - 29, kAlignment, kMemType, kHeapIndex, &alloc3));
    REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 72 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 23 * 1024);
    heap.free(alloc2);
    REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 8 * 1024);
    // heap should not grow here (first subheap has room)
    REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
    REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 48 * 1024);
    heap.free(alloc3);
    REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 45 * 1024);
    // check for exact fit -- heap should not grow here (third subheap has room)
    REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 63, kAlignment, kMemType, kHeapIndex, &alloc2));
    REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 60 * 1024);
    heap.free(alloc2);
    REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 45 * 1024);
    // heap should grow here (no subheap has room)
    REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc3));
    REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 85 * 1024);
    heap.free(alloc1);
    REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 80 * 1024);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 40 * 1024);
    heap.free(alloc3);
    REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 0 * 1024);
    REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
    REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 24 * 1024);
    // heap should alloc a new subheap because the memory type is different
    REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType + 1, kHeapIndex, &alloc1));
    REPORTER_ASSERT(reporter, heap.allocSize() == 136 * 1024 && heap.usedSize() == 48 * 1024);
    // heap should alloc a new subheap because the alignment is different
    REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, 128, kMemType, kHeapIndex, &alloc2));
    REPORTER_ASSERT(reporter, heap.allocSize() == 160 * 1024 && heap.usedSize() == 72 * 1024);
    heap.free(alloc1);
    heap.free(alloc2);
    heap.free(alloc0);
    REPORTER_ASSERT(reporter, heap.allocSize() == 160 * 1024 && heap.usedSize() == 0 * 1024);
}

DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkHeapTests, reporter, ctxInfo) {
    subheap_test(reporter, ctxInfo.grContext());
    suballoc_test(reporter, ctxInfo.grContext());
    singlealloc_test(reporter, ctxInfo.grContext());
}

#endif