/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#ifndef U_INLINES_H
#define U_INLINES_H

#include "pipe/p_context.h"
#include "pipe/p_defines.h"
#include "pipe/p_shader_tokens.h"
#include "pipe/p_state.h"
#include "pipe/p_screen.h"
#include "util/u_debug.h"
#include "util/u_debug_describe.h"
#include "util/u_debug_refcnt.h"
#include "util/u_atomic.h"
#include "util/u_box.h"
#include "util/u_math.h"


#ifdef __cplusplus
extern "C" {
#endif


/*
 * Reference counting helper functions.
 */


static INLINE void
pipe_reference_init(struct pipe_reference *reference, unsigned count)
{
   p_atomic_set(&reference->count, count);
}

static INLINE boolean
pipe_is_referenced(struct pipe_reference *reference)
{
   return p_atomic_read(&reference->count) != 0;
}

/**
 * Update reference counting.
 * The object that 'ptr' currently refers to (the old object), if any, will
 * be unreferenced, and 'reference' (the new object), if any, gains a
 * reference.
 * Both 'ptr' and 'reference' may be NULL.
 * \return TRUE if the old object's refcount hits zero and it should be
 *         destroyed.
 */
static INLINE boolean
pipe_reference_described(struct pipe_reference *ptr,
                         struct pipe_reference *reference,
                         debug_reference_descriptor get_desc)
{
   boolean destroy = FALSE;

   if (ptr != reference) {
      /* bump the reference.count first */
      if (reference) {
         assert(pipe_is_referenced(reference));
         p_atomic_inc(&reference->count);
         debug_reference(reference, get_desc, 1);
      }

      if (ptr) {
         assert(pipe_is_referenced(ptr));
         if (p_atomic_dec_zero(&ptr->count)) {
            destroy = TRUE;
         }
         debug_reference(ptr, get_desc, -1);
      }
   }

   return destroy;
}

static INLINE boolean
pipe_reference(struct pipe_reference *ptr, struct pipe_reference *reference)
{
   return pipe_reference_described(ptr, reference,
                                   (debug_reference_descriptor)debug_describe_reference);
}
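
/*
 * The typed reference helpers below all follow the same pattern: bump the
 * new object's refcount, drop the old one's, and destroy the old object if
 * its refcount reaches zero.  As an illustrative sketch (the names
 * "my_object" and "my_object_destroy" are hypothetical, not part of this
 * header), a driver-specific wrapper would look like:
 *
 *    static INLINE void
 *    my_object_reference(struct my_object **ptr, struct my_object *obj)
 *    {
 *       struct my_object *old = *ptr;
 *
 *       if (pipe_reference(&(*ptr)->reference, &obj->reference))
 *          my_object_destroy(old);
 *       *ptr = obj;
 *    }
 */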

/**
 * Set *ptr to point to 'surf', with reference counting: the new surface is
 * referenced and the previously pointed-to surface is unreferenced (and
 * destroyed if its refcount reaches zero).
 */
static INLINE void
pipe_surface_reference(struct pipe_surface **ptr, struct pipe_surface *surf)
{
   struct pipe_surface *old_surf = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &surf->reference,
                                (debug_reference_descriptor)debug_describe_surface))
      old_surf->context->surface_destroy(old_surf->context, old_surf);
   *ptr = surf;
}

/**
 * Similar to pipe_surface_reference(), but always sets the pointer to NULL
 * and takes an explicit context.  The explicit context avoids calling a
 * deleted context's surface_destroy() method when freeing a surface that is
 * shared by multiple contexts.
 */
static INLINE void
pipe_surface_release(struct pipe_context *pipe, struct pipe_surface **ptr)
{
   if (pipe_reference_described(&(*ptr)->reference, NULL,
                                (debug_reference_descriptor)debug_describe_surface))
      pipe->surface_destroy(pipe, *ptr);
   *ptr = NULL;
}


/**
 * Set *ptr to point to 'tex', with reference counting: the new resource is
 * referenced and the previously pointed-to resource is unreferenced (and
 * destroyed if its refcount reaches zero).
 */
static INLINE void
pipe_resource_reference(struct pipe_resource **ptr, struct pipe_resource *tex)
{
   struct pipe_resource *old_tex = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &tex->reference,
                                (debug_reference_descriptor)debug_describe_resource))
      old_tex->screen->resource_destroy(old_tex->screen, old_tex);
   *ptr = tex;
}

/**
 * Set *ptr to point to 'view', with reference counting.
 */
static INLINE void
pipe_sampler_view_reference(struct pipe_sampler_view **ptr, struct pipe_sampler_view *view)
{
   struct pipe_sampler_view *old_view = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &view->reference,
                                (debug_reference_descriptor)debug_describe_sampler_view))
      old_view->context->sampler_view_destroy(old_view->context, old_view);
   *ptr = view;
}

/**
 * Similar to pipe_sampler_view_reference(), but always sets the pointer to
 * NULL and takes an explicit context.  Passing an explicit context is a
 * work-around for the dangling context pointer problem that arises when
 * textures are shared by multiple contexts.  XXX fix this someday.
 */
static INLINE void
pipe_sampler_view_release(struct pipe_context *ctx,
                          struct pipe_sampler_view **ptr)
{
   struct pipe_sampler_view *old_view = *ptr;
   if (*ptr && (*ptr)->context != ctx) {
      debug_printf_once(("context mis-match in pipe_sampler_view_release()\n"));
   }
   if (pipe_reference_described(&(*ptr)->reference, NULL,
                    (debug_reference_descriptor)debug_describe_sampler_view)) {
      ctx->sampler_view_destroy(ctx, old_view);
   }
   *ptr = NULL;
}


/**
 * Set *ptr to point to 'target', with reference counting.
 */
static INLINE void
pipe_so_target_reference(struct pipe_stream_output_target **ptr,
                         struct pipe_stream_output_target *target)
{
   struct pipe_stream_output_target *old = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &target->reference,
                     (debug_reference_descriptor)debug_describe_so_target))
      old->context->stream_output_target_destroy(old->context, old);
   *ptr = target;
}

/**
 * (Re)initialize the fields of an already-allocated pipe_surface 'ps' to
 * describe mipmap 'level' / 'layer' of resource 'pt'.  ps->texture must be
 * NULL or hold a valid reference.
 */
static INLINE void
pipe_surface_reset(struct pipe_context *ctx, struct pipe_surface *ps,
                   struct pipe_resource *pt, unsigned level, unsigned layer,
                   unsigned flags)
{
   pipe_resource_reference(&ps->texture, pt);
   ps->format = pt->format;
   ps->width = u_minify(pt->width0, level);
   ps->height = u_minify(pt->height0, level);
   ps->usage = flags;
   ps->u.tex.level = level;
   ps->u.tex.first_layer = ps->u.tex.last_layer = layer;
   ps->context = ctx;
}

/**
 * Initialize a freshly allocated pipe_surface, setting its refcount to 1.
 */
static INLINE void
pipe_surface_init(struct pipe_context *ctx, struct pipe_surface *ps,
                  struct pipe_resource *pt, unsigned level, unsigned layer,
                  unsigned flags)
{
   ps->texture = 0;
   pipe_reference_init(&ps->reference, 1);
   pipe_surface_reset(ctx, ps, pt, level, layer, flags);
}
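
/*
 * Illustrative sketch (hypothetical driver-side code, not part of this
 * header): a driver's create_surface() callback typically allocates its own
 * surface struct and then uses pipe_surface_init() to fill in the common
 * fields:
 *
 *    struct my_surface *surf = CALLOC_STRUCT(my_surface);
 *    if (surf)
 *       pipe_surface_init(ctx, &surf->base, pt, templ->u.tex.level,
 *                         templ->u.tex.first_layer, templ->usage);
 *
 * Here "my_surface" (with a pipe_surface 'base' member) and the use of
 * CALLOC_STRUCT from u_memory.h are assumptions for the example only.
 */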

/* Return true if the surfaces are equal. */
static INLINE boolean
pipe_surface_equal(struct pipe_surface *s1, struct pipe_surface *s2)
{
   return s1->texture == s2->texture &&
          s1->format == s2->format &&
          (s1->texture->target != PIPE_BUFFER ||
           (s1->u.buf.first_element == s2->u.buf.first_element &&
            s1->u.buf.last_element == s2->u.buf.last_element)) &&
          (s1->texture->target == PIPE_BUFFER ||
           (s1->u.tex.level == s2->u.tex.level &&
            s1->u.tex.first_layer == s2->u.tex.first_layer &&
            s1->u.tex.last_layer == s2->u.tex.last_layer));
}

/*
 * Convenience wrappers for screen buffer functions.
 */

static INLINE struct pipe_resource *
pipe_buffer_create( struct pipe_screen *screen,
                    unsigned bind,
                    unsigned usage,
                    unsigned size )
{
   struct pipe_resource buffer;
   memset(&buffer, 0, sizeof buffer);
   buffer.target = PIPE_BUFFER;
   buffer.format = PIPE_FORMAT_R8_UNORM; /* want TYPELESS or similar */
   buffer.bind = bind;
   buffer.usage = usage;
   buffer.flags = 0;
   buffer.width0 = size;
   buffer.height0 = 1;
   buffer.depth0 = 1;
   buffer.array_size = 1;
   return screen->resource_create(screen, &buffer);
}
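
/*
 * For example (illustrative only), a vertex buffer holding static data
 * could be created with:
 *
 *    struct pipe_resource *vbuf =
 *       pipe_buffer_create(screen, PIPE_BIND_VERTEX_BUFFER,
 *                          PIPE_USAGE_STATIC, num_vertices * vertex_stride);
 *
 * where 'num_vertices' and 'vertex_stride' are placeholders.
 */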

static INLINE void *
pipe_buffer_map_range(struct pipe_context *pipe,
                      struct pipe_resource *buffer,
                      unsigned offset,
                      unsigned length,
                      unsigned usage,
                      struct pipe_transfer **transfer)
{
   struct pipe_box box;
   void *map;

   assert(offset < buffer->width0);
   assert(offset + length <= buffer->width0);
   assert(length);

   u_box_1d(offset, length, &box);

   *transfer = pipe->get_transfer( pipe,
                                   buffer,
                                   0,
                                   usage,
                                   &box);

   if (*transfer == NULL)
      return NULL;

   map = pipe->transfer_map( pipe, *transfer );
   if (map == NULL) {
      pipe->transfer_destroy( pipe, *transfer );
      *transfer = NULL;
      return NULL;
   }

   return map;
}
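
/*
 * Typical usage (illustrative sketch): map a sub-range for writing, copy
 * the data in, then unmap with pipe_buffer_unmap() below:
 *
 *    struct pipe_transfer *transfer;
 *    void *map = pipe_buffer_map_range(pipe, buf, offset, size,
 *                                      PIPE_TRANSFER_WRITE, &transfer);
 *    if (map) {
 *       memcpy(map, data, size);
 *       pipe_buffer_unmap(pipe, transfer);
 *    }
 */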


static INLINE void *
pipe_buffer_map(struct pipe_context *pipe,
                struct pipe_resource *buffer,
                unsigned usage,
                struct pipe_transfer **transfer)
{
   return pipe_buffer_map_range(pipe, buffer, 0, buffer->width0, usage, transfer);
}


static INLINE void
pipe_buffer_unmap(struct pipe_context *pipe,
                  struct pipe_transfer *transfer)
{
   if (transfer) {
      pipe->transfer_unmap(pipe, transfer);
      pipe->transfer_destroy(pipe, transfer);
   }
}

static INLINE void
pipe_buffer_flush_mapped_range(struct pipe_context *pipe,
                               struct pipe_transfer *transfer,
                               unsigned offset,
                               unsigned length)
{
   struct pipe_box box;
   int transfer_offset;

   assert(length);
   assert(transfer->box.x <= offset);
   assert(offset + length <= transfer->box.x + transfer->box.width);

   /* Match the old screen->buffer_flush_mapped_range() behaviour, where the
    * offset parameter is relative to the start of the buffer, not to the
    * mapped range.
    */
   transfer_offset = offset - transfer->box.x;

   u_box_1d(transfer_offset, length, &box);

   pipe->transfer_flush_region(pipe, transfer, &box);
}
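
/*
 * This is normally used together with PIPE_TRANSFER_FLUSH_EXPLICIT
 * (illustrative sketch):
 *
 *    map = pipe_buffer_map(pipe, buf,
 *                          PIPE_TRANSFER_WRITE |
 *                          PIPE_TRANSFER_FLUSH_EXPLICIT, &transfer);
 *    ... write 'nbytes' bytes at buffer offset 'off' ...
 *    pipe_buffer_flush_mapped_range(pipe, transfer, off, nbytes);
 *    pipe_buffer_unmap(pipe, transfer);
 */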

static INLINE void
pipe_buffer_write(struct pipe_context *pipe,
                  struct pipe_resource *buf,
                  unsigned offset,
                  unsigned size,
                  const void *data)
{
   struct pipe_box box;
   unsigned usage = PIPE_TRANSFER_WRITE;

   if (offset == 0 && size == buf->width0) {
      usage |= PIPE_TRANSFER_DISCARD_WHOLE_RESOURCE;
   } else {
      usage |= PIPE_TRANSFER_DISCARD_RANGE;
   }

   u_box_1d(offset, size, &box);

   pipe->transfer_inline_write( pipe,
                                buf,
                                0,
                                usage,
                                &box,
                                data,
                                size,
                                0);
}

/**
 * Special case for writing non-overlapping ranges.
 *
 * GPU/CPU synchronization can be avoided when writing a range that has
 * never been written before.
 */
static INLINE void
pipe_buffer_write_nooverlap(struct pipe_context *pipe,
                            struct pipe_resource *buf,
                            unsigned offset, unsigned size,
                            const void *data)
{
   struct pipe_box box;

   u_box_1d(offset, size, &box);

   pipe->transfer_inline_write(pipe,
                               buf,
                               0,
                               (PIPE_TRANSFER_WRITE |
                                PIPE_TRANSFER_UNSYNCHRONIZED),
                               &box,
                               data,
                               0, 0);
}

static INLINE struct pipe_resource *
pipe_buffer_create_with_data(struct pipe_context *pipe,
                             unsigned bind,
                             unsigned usage,
                             unsigned size,
                             void *ptr)
{
   struct pipe_resource *res = pipe_buffer_create(pipe->screen,
                                                  bind, usage, size);
   pipe_buffer_write_nooverlap(pipe, res, 0, size, ptr);
   return res;
}

static INLINE void
pipe_buffer_read(struct pipe_context *pipe,
                 struct pipe_resource *buf,
                 unsigned offset,
                 unsigned size,
                 void *data)
{
   struct pipe_transfer *src_transfer;
   ubyte *map;

   map = (ubyte *) pipe_buffer_map_range(pipe,
                                         buf,
                                         offset, size,
                                         PIPE_TRANSFER_READ,
                                         &src_transfer);

   if (map)
      memcpy(data, map, size);

   pipe_buffer_unmap(pipe, src_transfer);
}

static INLINE struct pipe_transfer *
pipe_get_transfer( struct pipe_context *context,
                   struct pipe_resource *resource,
                   unsigned level, unsigned layer,
                   enum pipe_transfer_usage usage,
                   unsigned x, unsigned y,
                   unsigned w, unsigned h)
{
   struct pipe_box box;
   u_box_2d_zslice( x, y, layer, w, h, &box );
   return context->get_transfer( context,
                                 resource,
                                 level,
                                 usage,
                                 &box );
}
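
/*
 * Illustrative sketch: reading back a rectangle of a 2D texture with this
 * helper and the transfer wrappers below:
 *
 *    struct pipe_transfer *t =
 *       pipe_get_transfer(pipe, tex, 0, 0, PIPE_TRANSFER_READ, x, y, w, h);
 *    ubyte *map = (ubyte *) pipe_transfer_map(pipe, t);
 *    ... copy 'h' rows of data, advancing the source by t->stride bytes ...
 *    pipe_transfer_unmap(pipe, t);
 *    pipe_transfer_destroy(pipe, t);
 *
 * (Error checking of the returned transfer/map is omitted for brevity.)
 */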

static INLINE void *
pipe_transfer_map( struct pipe_context *context,
                   struct pipe_transfer *transfer )
{
   return context->transfer_map( context, transfer );
}

static INLINE void
pipe_transfer_unmap( struct pipe_context *context,
                     struct pipe_transfer *transfer )
{
   context->transfer_unmap( context, transfer );
}


static INLINE void
pipe_transfer_destroy( struct pipe_context *context,
                       struct pipe_transfer *transfer )
{
   context->transfer_destroy(context, transfer);
}

static INLINE void
pipe_set_constant_buffer(struct pipe_context *pipe, uint shader, uint index,
                         struct pipe_resource *buf)
{
   if (buf) {
      struct pipe_constant_buffer cb;
      cb.buffer = buf;
      cb.buffer_offset = 0;
      cb.buffer_size = buf->width0;
      cb.user_buffer = NULL;
      pipe->set_constant_buffer(pipe, shader, index, &cb);
   } else {
      pipe->set_constant_buffer(pipe, shader, index, NULL);
   }
}
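
/*
 * For example (illustrative only), binding 'buf' as constant buffer slot 0
 * of the fragment shader:
 *
 *    pipe_set_constant_buffer(pipe, PIPE_SHADER_FRAGMENT, 0, buf);
 */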


static INLINE boolean util_get_offset(
   const struct pipe_rasterizer_state *templ,
   unsigned fill_mode)
{
   switch (fill_mode) {
   case PIPE_POLYGON_MODE_POINT:
      return templ->offset_point;
   case PIPE_POLYGON_MODE_LINE:
      return templ->offset_line;
   case PIPE_POLYGON_MODE_FILL:
      return templ->offset_tri;
   default:
      assert(0);
      return FALSE;
   }
}

/**
 * This function is used to copy an array of pipe_vertex_buffer structures,
 * while properly referencing the pipe_vertex_buffer::buffer member.
 *
 * \sa util_copy_framebuffer_state
 */
static INLINE void util_copy_vertex_buffers(struct pipe_vertex_buffer *dst,
                                            unsigned *dst_count,
                                            const struct pipe_vertex_buffer *src,
                                            unsigned src_count)
{
   unsigned i;

   /* Reference the buffers of 'src' in 'dst'. */
   for (i = 0; i < src_count; i++) {
      pipe_resource_reference(&dst[i].buffer, src[i].buffer);
   }
   /* Unreference the rest of the buffers in 'dst'. */
   for (; i < *dst_count; i++) {
      pipe_resource_reference(&dst[i].buffer, NULL);
   }

   /* Update the size of 'dst' and copy over the other members
    * of pipe_vertex_buffer. */
   *dst_count = src_count;
   memcpy(dst, src, src_count * sizeof(struct pipe_vertex_buffer));
}
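
/*
 * Illustrative sketch: a driver's set_vertex_buffers() hook can use this to
 * keep a reference-counted copy of the currently bound buffers (the 'ctx'
 * structure and its fields below are hypothetical):
 *
 *    util_copy_vertex_buffers(ctx->vertex_buffers, &ctx->nr_vertex_buffers,
 *                             buffers, count);
 */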

static INLINE float
util_get_min_point_size(const struct pipe_rasterizer_state *state)
{
   /* The point size should be clamped to this value at the rasterizer stage.
    */
   return state->gl_rasterization_rules &&
          !state->point_quad_rasterization &&
          !state->point_smooth &&
          !state->multisample ? 1.0f : 0.0f;
}

static INLINE void
util_query_clear_result(union pipe_query_result *result, unsigned type)
{
   switch (type) {
   case PIPE_QUERY_OCCLUSION_PREDICATE:
   case PIPE_QUERY_SO_OVERFLOW_PREDICATE:
   case PIPE_QUERY_GPU_FINISHED:
      result->b = FALSE;
      break;
   case PIPE_QUERY_OCCLUSION_COUNTER:
   case PIPE_QUERY_TIMESTAMP:
   case PIPE_QUERY_TIME_ELAPSED:
   case PIPE_QUERY_PRIMITIVES_GENERATED:
   case PIPE_QUERY_PRIMITIVES_EMITTED:
      result->u64 = 0;
      break;
   case PIPE_QUERY_SO_STATISTICS:
      memset(&result->so_statistics, 0, sizeof(result->so_statistics));
      break;
   case PIPE_QUERY_TIMESTAMP_DISJOINT:
      memset(&result->timestamp_disjoint, 0, sizeof(result->timestamp_disjoint));
      break;
   case PIPE_QUERY_PIPELINE_STATISTICS:
      memset(&result->pipeline_statistics, 0, sizeof(result->pipeline_statistics));
      break;
   default:
      assert(0);
   }
}

/** Convert PIPE_TEXTURE_x to TGSI_TEXTURE_x */
static INLINE unsigned
util_pipe_tex_to_tgsi_tex(enum pipe_texture_target pipe_tex_target,
                          unsigned nr_samples)
{
   switch (pipe_tex_target) {
   case PIPE_TEXTURE_1D:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_1D;

   case PIPE_TEXTURE_2D:
      return nr_samples > 1 ? TGSI_TEXTURE_2D_MSAA : TGSI_TEXTURE_2D;

   case PIPE_TEXTURE_RECT:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_RECT;

   case PIPE_TEXTURE_3D:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_3D;

   case PIPE_TEXTURE_CUBE:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_CUBE;

   case PIPE_TEXTURE_1D_ARRAY:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_1D_ARRAY;

   case PIPE_TEXTURE_2D_ARRAY:
      return nr_samples > 1 ? TGSI_TEXTURE_2D_ARRAY_MSAA :
                              TGSI_TEXTURE_2D_ARRAY;

   default:
      assert(0 && "unexpected texture target");
      return TGSI_TEXTURE_UNKNOWN;
   }
}

#ifdef __cplusplus
}
#endif

#endif /* U_INLINES_H */