/*
 * Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.

 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of The Linux Foundation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <cutils/log.h>
#include <fcntl.h>
#include <dlfcn.h>
#include "gralloc_priv.h"
#include "alloc_controller.h"
#include "memalloc.h"
#include "ionalloc.h"
#include "gr.h"
#include "comptype.h"

#ifdef VENUS_COLOR_FORMAT
#include <media/msm_media_info.h>
#else
#define VENUS_Y_STRIDE(args...) 0
#define VENUS_Y_SCANLINES(args...) 0
#define VENUS_BUFFER_SIZE(args...) 0
#endif
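// Note: without the Venus media headers these stubs simply evaluate to 0,
// so the HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS stride/scanline/size paths
// below return 0 on such builds.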

using namespace gralloc;
using namespace qdutils;

ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo);

//Common functions
static bool canFallback(int usage, bool triedSystem)
{
    // Fall back to the system heap when an allocation fails, unless
    // 1. Composition type is MDP
    // 2. Alloc from system heap was already tried
    // 3. The heap type is requested explicitly
    // 4. The heap type is protected
    // 5. The buffer is meant for external display only

    if(QCCompositionType::getInstance().getCompositionType() &
       COMPOSITION_TYPE_MDP)
        return false;
    if(triedSystem)
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY))
        return false;
    //Return true by default
    return true;
}

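// Returns true only when the client explicitly asked for an uncached buffer
// (GRALLOC_USAGE_PRIVATE_UNCACHED); system-heap allocations are always cached.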
static bool useUncached(int usage)
{
    // System heaps cannot be uncached
    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
        return false;
    if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
        return true;
    return false;
}

//-------------- AdrenoMemInfo-----------------------//
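// The Adreno helper library is loaded lazily at runtime. If the library or
// the compute_surface_padding symbol is unavailable,
// LINK_adreno_compute_padding stays unset (as checked in getStride()) and
// RGB surfaces fall back to plain 32-pixel alignment.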
AdrenoMemInfo::AdrenoMemInfo()
{
    libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW);
    if (libadreno_utils) {
        *(void **)&LINK_adreno_compute_padding = ::dlsym(libadreno_utils,
                                           "compute_surface_padding");
    }
}

AdrenoMemInfo::~AdrenoMemInfo()
{
    if (libadreno_utils) {
        ::dlclose(libadreno_utils);
    }
}

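// Returns the stride in pixels, aligned per format. Illustrative examples
// (assuming the Adreno padding library is not loaded): width = 100 with an
// RGB format gives ALIGN(100, 32) = 128; width = 100 with YV12 gives
// ALIGN(100, 16) = 112.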
int AdrenoMemInfo::getStride(int width, int format)
{
    int stride = ALIGN(width, 32);
    // Currently surface padding is only computed for RGB* surfaces.
    if (format < 0x7) {
        int bpp = 4;
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGB_888:
                bpp = 3;
                break;
            case HAL_PIXEL_FORMAT_RGB_565:
            case HAL_PIXEL_FORMAT_RGBA_5551:
            case HAL_PIXEL_FORMAT_RGBA_4444:
                bpp = 2;
                break;
            default: break;
        }
        if ((libadreno_utils) && (LINK_adreno_compute_padding)) {
            int surface_tile_height = 1;   // Linear surface
            int raster_mode         = 0;   // Adreno unknown raster mode.
            int padding_threshold   = 512; // Threshold for padding surfaces.
            // The function below expects the width to be a multiple of
            // 32 pixels, hence we pass stride instead of width.
            stride = LINK_adreno_compute_padding(stride, bpp,
                                      surface_tile_height, raster_mode,
                                      padding_threshold);
        }
    } else {
        switch (format)
        {
            case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                stride = ALIGN(width, 32);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
                stride = ALIGN(width, 128);
                break;
            case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
            case HAL_PIXEL_FORMAT_YCbCr_420_SP:
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YV12:
            case HAL_PIXEL_FORMAT_YCbCr_422_SP:
            case HAL_PIXEL_FORMAT_YCrCb_422_SP:
                stride = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
                stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                stride = width;
                break;
            default: break;
        }
    }
    return stride;
}

//-------------- IAllocController-----------------------//
IAllocController* IAllocController::sController = NULL;
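// Lazily creates the single IonController instance. Note that the creation
// is not guarded by a lock, so the first call is expected to happen before
// any concurrent use.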
IAllocController* IAllocController::getInstance(void)
{
    if(sController == NULL) {
        sController = new IonController();
    }
    return sController;
}


//-------------- IonController-----------------------//
IonController::IonController()
{
    mIonAlloc = new IonAlloc();
}

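// Translates gralloc usage bits into ION heap flags, attempts the
// allocation, and optionally falls back to the system heap (see
// canFallback). On success, data.allocType carries PRIV_FLAGS_USES_ION and,
// for protected buffers, PRIV_FLAGS_SECURE_BUFFER.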
int IonController::allocate(alloc_data& data, int usage)
{
    int ionFlags = 0;
    int ret;

    data.uncached = useUncached(usage);
    data.allocType = 0;

    if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
        ionFlags |= ION_HEAP(ION_SF_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
        ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);

    //MM heap is exclusively a secure heap.
    if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
        //XXX: Right now the MM heap is the only secure heap we have. When we
        //have other secure heaps, we can change this.
        if(usage & GRALLOC_USAGE_PROTECTED) {
            ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
            ionFlags |= ION_SECURE;
        }
        else {
            ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP cannot be used as an "
                  "insecure heap; trying to use the IOMMU heap instead");
            ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
        }
    }

    if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
        ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);

    if(usage & GRALLOC_USAGE_PROTECTED)
        data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;

    // If no heap flags are set, default to the SF + IOMMU heaps so that
    // bypass can work; we can still fall back to the system heap if we
    // run out.
    if(!ionFlags)
        ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);

    data.flags = ionFlags;
    ret = mIonAlloc->alloc_buffer(data);

    // Fallback
    if(ret < 0 && canFallback(usage,
                              (ionFlags & ION_HEAP(ION_SYSTEM_HEAP_ID))))
    {
        ALOGW("Falling back to system heap");
        data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
        ret = mIonAlloc->alloc_buffer(data);
    }

    if(ret >= 0) {
        data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION;
    }

    return ret;
}

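// Returns the ION allocator for handles flagged with PRIV_FLAGS_USES_ION;
// any other flag combination is treated as an error and NULL is returned,
// so callers must check the result before use.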
IMemAlloc* IonController::getAllocator(int flags)
{
    IMemAlloc* memalloc = NULL;
    if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
        memalloc = mIonAlloc;
    } else {
        ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
    }

    return memalloc;
}

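// Computes the aligned width/height and total buffer size for a format.
// Illustrative example (assuming the Adreno padding library is not loaded):
// a 100x100 RGBA_8888 buffer yields alignedw = 128, alignedh = 128 and
// size = 128 * 128 * 4 = 65536 bytes.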
size_t getBufferSizeAndDimensions(int width, int height, int format,
                                  int& alignedw, int &alignedh)
{
    size_t size;

    alignedw = AdrenoMemInfo::getInstance().getStride(width, format);
    alignedh = ALIGN(height, 32);
    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
            size = alignedw * alignedh * 4;
            break;
        case HAL_PIXEL_FORMAT_RGB_888:
            size = alignedw * alignedh * 3;
            break;
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_RGBA_5551:
        case HAL_PIXEL_FORMAT_RGBA_4444:
        case HAL_PIXEL_FORMAT_RAW_SENSOR:
            size = alignedw * alignedh * 2;
            break;

            // adreno formats
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:  // NV21
            size  = ALIGN(alignedw*alignedh, 4096);
            size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:   // NV12
            // The chroma plane is subsampled,
            // but the pitch in bytes is unchanged.
            // The GPU needs 4K alignment, but the video decoder needs 8K.
            size  = ALIGN( alignedw * alignedh, 8192);
            size += ALIGN( alignedw * ALIGN(height/2, 32), 8192);
            break;
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        case HAL_PIXEL_FORMAT_YV12:
            if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) {
                ALOGE("w or h is odd for the YV12 format");
                return -EINVAL;
            }
            alignedh = height;
            if (HAL_PIXEL_FORMAT_NV12_ENCODEABLE == format) {
                // The encoder requires a 2K aligned chroma offset.
                size = ALIGN(alignedw*alignedh, 2048) +
                    (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            } else {
                size = alignedw*alignedh +
                    (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            }
            size = ALIGN(size, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            alignedh = height;
            size = ALIGN((alignedw*alignedh) + (alignedw*alignedh)/2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            if(width & 1) {
                ALOGE("width is odd for the YUV422_SP format");
                return -EINVAL;
            }
            alignedh = height;
            size = ALIGN(alignedw * alignedh * 2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
            alignedh = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
            size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height);
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if(height != 1) {
                ALOGE("%s: Buffers with format HAL_PIXEL_FORMAT_BLOB "
                      "must have height == 1", __FUNCTION__);
                return -EINVAL;
            }
            alignedh = height;
            alignedw = width;
            size = width;
            break;
        default:
            ALOGE("unrecognized pixel format: 0x%x", format);
            return -EINVAL;
    }

    return size;
}

// Allocate buffer from width, height and format into a
// private_handle_t. It is the responsibility of the caller
// to free the buffer using the free_buffer function.
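// Typical usage (illustrative; GRALLOC_USAGE_HW_TEXTURE stands in for any
// valid usage mask):
//   private_handle_t *hnd = NULL;
//   if (alloc_buffer(&hnd, 1280, 720, HAL_PIXEL_FORMAT_RGBA_8888,
//                    GRALLOC_USAGE_HW_TEXTURE) == 0) {
//       // ... use hnd->fd / hnd->base ...
//       free_buffer(hnd);
//   }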
int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
{
    alloc_data data;
    int alignedw, alignedh;
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    data.base = 0;
    data.fd = -1;
    data.offset = 0;
    data.size = getBufferSizeAndDimensions(w, h, format, alignedw, alignedh);
    data.align = getpagesize();
    data.uncached = useUncached(usage);
    int allocFlags = usage;

    int err = sAlloc->allocate(data, allocFlags);
    if (0 != err) {
        ALOGE("%s: allocate failed", __FUNCTION__);
        return -ENOMEM;
    }

    private_handle_t* hnd = new private_handle_t(data.fd, data.size,
                                                 data.allocType, 0, format,
                                                 alignedw, alignedh);
    hnd->base = (int) data.base;
    hnd->offset = data.offset;
    hnd->gpuaddr = 0;
    *pHnd = hnd;
    return 0;
}

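// Releases a buffer obtained from alloc_buffer(). The underlying memory is
// freed through the allocator recorded in hnd->flags; the handle itself is
// deleted even when no valid fd is present.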
void free_buffer(private_handle_t *hnd)
{
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    if (hnd && hnd->fd > 0) {
        IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags);
        memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset, hnd->fd);
    }
    if(hnd)
        delete hnd;
}