/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

/**
 * \file
 * Build post-transformation, post-clipping vertex buffers and element
 * lists by hooking into the end of the primitive pipeline and
 * manipulating the vertex_id field in the vertex headers.
 *
 * XXX: work in progress
 *
 * \author José Fonseca <jrfonseca@tungstengraphics.com>
 * \author Keith Whitwell <keith@tungstengraphics.com>
 */


#include "draw/draw_context.h"
#include "draw/draw_vbuf.h"
#include "util/u_debug.h"
#include "util/u_inlines.h"
#include "util/u_math.h"
#include "util/u_memory.h"
#include "util/u_fifo.h"

#include "i915_context.h"
#include "i915_reg.h"
#include "i915_batch.h"
#include "i915_state.h"


#define VBUF_MAP_BUFFER

/**
 * Primitive renderer for i915.
 */
struct i915_vbuf_render {
   struct vbuf_render base;

   struct i915_context *i915;

   /** Vertex size in bytes */
   size_t vertex_size;

   /** Software primitive */
   unsigned prim;

   /** Hardware primitive */
   unsigned hwprim;

   /** Fallback primitive for which an element list must be generated, or 0 */
   unsigned fallback;

   /* Stuff for the vbo */
   struct i915_winsys_buffer *vbo;
   size_t vbo_size; /**< current size of allocated buffer */
   size_t vbo_alloc_size; /**< minimum buffer size to allocate */
   size_t vbo_hw_offset; /**< offset that we program the hardware with */
   size_t vbo_sw_offset; /**< offset that we work with */
   size_t vbo_index; /**< index offset to be added to all indices */
   void *vbo_ptr;
   size_t vbo_max_used; /**< bytes of vertex data used since the last release */
   size_t vbo_max_index; /**< max index used by the most recently unmapped vertices */

#ifndef VBUF_MAP_BUFFER
   size_t map_used_start;
   size_t map_used_end;
   size_t map_size;
#endif
};


/**
 * Basically a cast wrapper.
 */
static INLINE struct i915_vbuf_render *
i915_vbuf_render(struct vbuf_render *render)
{
   assert(render);
   return (struct i915_vbuf_render *)render;
}

/**
 * If the vbo state differs between the renderer and the context,
 * push the state to the context. This function pushes
 * hw_offset to i915->vbo_offset and vbo to i915->vbo.
 *
 * Side effects:
 *    May update the context's vbo_offset and vbo fields.
 */
static void
i915_vbuf_update_vbo_state(struct vbuf_render *render)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;

   if (i915->vbo != i915_render->vbo ||
       i915->vbo_offset != i915_render->vbo_hw_offset) {
      i915->vbo = i915_render->vbo;
      i915->vbo_offset = i915_render->vbo_hw_offset;
      i915->dirty |= I915_NEW_VBO;
   }
}

/**
 * Callback exported to the draw module.
 * Returns the current vertex_info.
 *
 * Side effects:
 *    If state is dirty, updates derived state.
 */
static const struct vertex_info *
i915_vbuf_render_get_vertex_info(struct vbuf_render *render)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;

   if (i915->dirty) {
      /* make sure we have an up-to-date vertex layout */
      i915_update_derived(i915);
   }

   return &i915->current.vertex_info;
}

/**
 * Reserve space in the vbo for vertices.
 *
 * Side effects:
 *    None.
 */
static boolean
i915_vbuf_render_reserve(struct i915_vbuf_render *i915_render, size_t size)
{
   struct i915_context *i915 = i915_render->i915;

   if (i915_render->vbo_size < size + i915_render->vbo_sw_offset)
      return FALSE;

   if (i915->vbo_flushed)
      return FALSE;

   return TRUE;
}

/**
 * Allocate a new vbo buffer if there is not enough space for the
 * number of vertices requested by the draw module.
 *
 * Side effects:
 *    Updates hw_offset, sw_offset, index and allocates a new buffer.
 *    Will set i915->vbo to NULL on buffer allocation.
 */
static void
i915_vbuf_render_new_buf(struct i915_vbuf_render *i915_render, size_t size)
{
   struct i915_context *i915 = i915_render->i915;
   struct i915_winsys *iws = i915->iws;

   if (i915_render->vbo) {
      iws->buffer_unmap(iws, i915_render->vbo);
      iws->buffer_destroy(iws, i915_render->vbo);
      /*
       * XXX If buffers were referenced then this should be done in
       * update_vbo_state, but since they aren't and malloc likes to reuse
       * memory we need to set it to NULL here.
       */
      i915->vbo = NULL;
      i915_render->vbo = NULL;
   }

   i915->vbo_flushed = 0;

   i915_render->vbo_size = MAX2(size, i915_render->vbo_alloc_size);
   i915_render->vbo_hw_offset = 0;
   i915_render->vbo_sw_offset = 0;
   i915_render->vbo_index = 0;

#ifndef VBUF_MAP_BUFFER
   if (i915_render->vbo_size > i915_render->map_size) {
      i915_render->map_size = i915_render->vbo_size;
      FREE(i915_render->vbo_ptr);
      i915_render->vbo_ptr = MALLOC(i915_render->map_size);
   }
#endif

   i915_render->vbo = iws->buffer_create(iws, i915_render->vbo_size,
                                         I915_NEW_VERTEX);
   i915_render->vbo_ptr = iws->buffer_map(iws, i915_render->vbo, TRUE);
}

/**
 * Callback exported to the draw module.
 *
 * Side effects:
 *    Updates hw_offset, sw_offset, index and may allocate
 *    a new buffer. May also update the vbo state
 *    on the i915 context.
 */
static boolean
i915_vbuf_render_allocate_vertices(struct vbuf_render *render,
                                   ushort vertex_size,
                                   ushort nr_vertices)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   size_t size = (size_t)vertex_size * (size_t)nr_vertices;
   size_t offset;

   /*
    * Align sw_offset to the first multiple of the vertex size past hw_offset.
    * Set index to the number of vertex-size multiples from hw_offset to
    * sw_offset. This stays correct even if i915_vbuf_render_new_buf
    * allocates a new buffer, since it resets index, sw_offset and hw_offset.
    */
   {
      offset = i915_render->vbo_sw_offset - i915_render->vbo_hw_offset;
      offset = util_align_npot(offset, vertex_size);
      i915_render->vbo_sw_offset = i915_render->vbo_hw_offset + offset;
      i915_render->vbo_index = offset / vertex_size;
   }

   if (!i915_vbuf_render_reserve(i915_render, size))
      i915_vbuf_render_new_buf(i915_render, size);

   /*
    * If a new buffer has been allocated, sw_offset,
    * hw_offset & index will have been reset by new_buf.
    */

   i915_render->vertex_size = vertex_size;

   i915_vbuf_update_vbo_state(render);

   if (!i915_render->vbo)
      return FALSE;
   return TRUE;
}

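/**
 * Callback exported to the draw module.
 * Returns a pointer to which the draw module can write vertices.
 *
 * With VBUF_MAP_BUFFER defined the vbo is mapped directly at sw_offset,
 * otherwise vertices are written to a local buffer and uploaded to the
 * vbo in unmap_vertices.
 */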
static void *
i915_vbuf_render_map_vertices(struct vbuf_render *render)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;

   if (i915->vbo_flushed)
      debug_printf("%s bad vbo flush occurred stalling on hw\n", __FUNCTION__);

#ifdef VBUF_MAP_BUFFER
   return (unsigned char *)i915_render->vbo_ptr + i915_render->vbo_sw_offset;
#else
   return (unsigned char *)i915_render->vbo_ptr;
#endif
}

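/**
 * Callback exported to the draw module.
 * Records the range of vertices written by the draw module and, when the
 * vbo is not mapped directly, uploads the used range to the vbo.
 */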
static void
i915_vbuf_render_unmap_vertices(struct vbuf_render *render,
                                ushort min_index,
                                ushort max_index)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;
   struct i915_winsys *iws = i915->iws;

   i915_render->vbo_max_index = max_index;
   i915_render->vbo_max_used = MAX2(i915_render->vbo_max_used,
                                    i915_render->vertex_size * (max_index + 1));
#ifdef VBUF_MAP_BUFFER
   (void)iws;
#else
   i915_render->map_used_start = i915_render->vertex_size * min_index;
   i915_render->map_used_end = i915_render->vertex_size * (max_index + 1);
   iws->buffer_write(iws, i915_render->vbo,
                     i915_render->map_used_start + i915_render->vbo_sw_offset,
                     i915_render->map_used_end - i915_render->map_used_start,
                     (unsigned char *)i915_render->vbo_ptr + i915_render->map_used_start);

#endif
}

/**
 * Ensure that the given max_index is not larger than the hardware
 * index limit. If it is larger, advance hw_offset to the same position
 * in the vbo as sw_offset and reset index to zero.
 *
 * Side effects:
 *    May update hw_offset and index.
 */
static void
i915_vbuf_ensure_index_bounds(struct vbuf_render *render,
                              unsigned max_index)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);

   if (max_index + i915_render->vbo_index < ((1 << 17) - 1))
      return;

   i915_render->vbo_hw_offset = i915_render->vbo_sw_offset;
   i915_render->vbo_index = 0;

   i915_vbuf_update_vbo_state(render);
}

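/**
 * Callback exported to the draw module.
 * Translates the pipe primitive into a hardware primitive and flags
 * primitives that need a software index-generation fallback
 * (line loops, quads and quad strips).
 */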
static void
i915_vbuf_render_set_primitive(struct vbuf_render *render,
                               unsigned prim)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   i915_render->prim = prim;

   switch(prim) {
   case PIPE_PRIM_POINTS:
      i915_render->hwprim = PRIM3D_POINTLIST;
      i915_render->fallback = 0;
      break;
   case PIPE_PRIM_LINES:
      i915_render->hwprim = PRIM3D_LINELIST;
      i915_render->fallback = 0;
      break;
   case PIPE_PRIM_LINE_LOOP:
      i915_render->hwprim = PRIM3D_LINELIST;
      i915_render->fallback = PIPE_PRIM_LINE_LOOP;
      break;
   case PIPE_PRIM_LINE_STRIP:
      i915_render->hwprim = PRIM3D_LINESTRIP;
      i915_render->fallback = 0;
      break;
   case PIPE_PRIM_TRIANGLES:
      i915_render->hwprim = PRIM3D_TRILIST;
      i915_render->fallback = 0;
      break;
   case PIPE_PRIM_TRIANGLE_STRIP:
      i915_render->hwprim = PRIM3D_TRISTRIP;
      i915_render->fallback = 0;
      break;
   case PIPE_PRIM_TRIANGLE_FAN:
      i915_render->hwprim = PRIM3D_TRIFAN;
      i915_render->fallback = 0;
      break;
   case PIPE_PRIM_QUADS:
      i915_render->hwprim = PRIM3D_TRILIST;
      i915_render->fallback = PIPE_PRIM_QUADS;
      break;
   case PIPE_PRIM_QUAD_STRIP:
      i915_render->hwprim = PRIM3D_TRILIST;
      i915_render->fallback = PIPE_PRIM_QUAD_STRIP;
      break;
   case PIPE_PRIM_POLYGON:
      i915_render->hwprim = PRIM3D_POLY;
      i915_render->fallback = 0;
      break;
   default:
      /* FIXME: Actually, can handle a lot more just fine... */
      assert(0 && "unexpected prim in i915_vbuf_render_set_primitive()");
   }
}

/**
 * Generate indices for draw_arrays fallbacks.
 */
static void
draw_arrays_generate_indices(struct vbuf_render *render,
                             unsigned start, uint nr,
                             unsigned type)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;
   unsigned i;
   unsigned end = start + nr + i915_render->vbo_index;
   start += i915_render->vbo_index;

   switch(type) {
   case 0:
      for (i = start; i+1 < end; i += 2)
         OUT_BATCH((i+0) | (i+1) << 16);
      if (i < end)
         OUT_BATCH(i);
      break;
   case PIPE_PRIM_LINE_LOOP:
      if (nr >= 2) {
         for (i = start + 1; i < end; i++)
            OUT_BATCH((i-1) | (i+0) << 16);
         OUT_BATCH((i-1) | (start) << 16);
      }
      break;
   case PIPE_PRIM_QUADS:
      for (i = start; i + 3 < end; i += 4) {
         OUT_BATCH((i+0) | (i+1) << 16);
         OUT_BATCH((i+3) | (i+1) << 16);
         OUT_BATCH((i+2) | (i+3) << 16);
      }
      break;
   case PIPE_PRIM_QUAD_STRIP:
      for (i = start; i + 3 < end; i += 2) {
         OUT_BATCH((i+0) | (i+1) << 16);
         OUT_BATCH((i+3) | (i+2) << 16);
         OUT_BATCH((i+0) | (i+3) << 16);
      }
      break;
   default:
      assert(0);
   }
}

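/**
 * Calculate how many indices draw_arrays_generate_indices will emit
 * for nr vertices of the given fallback type (0 means a plain list).
 */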
static unsigned
draw_arrays_calc_nr_indices(uint nr, unsigned type)
{
   switch (type) {
   case 0:
      return nr;
   case PIPE_PRIM_LINE_LOOP:
      if (nr >= 2)
         return nr * 2;
      else
         return 0;
   case PIPE_PRIM_QUADS:
      return (nr / 4) * 6;
   case PIPE_PRIM_QUAD_STRIP:
      return ((nr - 2) / 2) * 6;
   default:
      assert(0);
      return 0;
   }
}

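/**
 * Emit an indexed hardware primitive for draw_arrays calls that need a
 * fallback, generating the element list with draw_arrays_generate_indices.
 */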
static void
draw_arrays_fallback(struct vbuf_render *render,
                     unsigned start,
                     uint nr)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;
   unsigned nr_indices;

   nr_indices = draw_arrays_calc_nr_indices(nr, i915_render->fallback);
   if (!nr_indices)
      return;

   i915_vbuf_ensure_index_bounds(render, start + nr_indices);

   if (i915->dirty)
      i915_update_derived(i915);

   if (i915->hardware_dirty)
      i915_emit_hardware_state(i915);

   if (!BEGIN_BATCH(1 + (nr_indices + 1)/2)) {
      FLUSH_BATCH(NULL);

      /* Make sure state is re-emitted after a flush:
       */
      i915_emit_hardware_state(i915);
      i915->vbo_flushed = 1;

      if (!BEGIN_BATCH(1 + (nr_indices + 1)/2)) {
         assert(0);
         goto out;
      }
   }

   OUT_BATCH(_3DPRIMITIVE |
             PRIM_INDIRECT |
             i915_render->hwprim |
             PRIM_INDIRECT_ELTS |
             nr_indices);

   draw_arrays_generate_indices(render, start, nr, i915_render->fallback);

out:
   return;
}

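/**
 * Callback exported to the draw module.
 * Emits a sequential (non-indexed) hardware primitive, or takes the
 * index-generating fallback path for primitives the hardware lacks.
 */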
static void
i915_vbuf_render_draw_arrays(struct vbuf_render *render,
                             unsigned start,
                             uint nr)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;

   if (i915_render->fallback) {
      draw_arrays_fallback(render, start, nr);
      return;
   }

   i915_vbuf_ensure_index_bounds(render, start + nr);
   start += i915_render->vbo_index;

   if (i915->dirty)
      i915_update_derived(i915);

   if (i915->hardware_dirty)
      i915_emit_hardware_state(i915);

   if (!BEGIN_BATCH(2)) {
      FLUSH_BATCH(NULL);

      /* Make sure state is re-emitted after a flush:
       */
      i915_emit_hardware_state(i915);
      i915->vbo_flushed = 1;

      if (!BEGIN_BATCH(2)) {
         assert(0);
         goto out;
      }
   }

   OUT_BATCH(_3DPRIMITIVE |
             PRIM_INDIRECT |
             PRIM_INDIRECT_SEQUENTIAL |
             i915_render->hwprim |
             nr);
   OUT_BATCH(start); /* Beginning vertex index */

out:
   return;
}

/**
 * Used for normal and fallback emitting of indices.
 * If type is zero, normal operation is assumed.
 */
static void
draw_generate_indices(struct vbuf_render *render,
                      const ushort *indices,
                      uint nr_indices,
                      unsigned type)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;
   unsigned i;
   unsigned o = i915_render->vbo_index;

   switch(type) {
   case 0:
      for (i = 0; i + 1 < nr_indices; i += 2) {
         OUT_BATCH((o+indices[i]) | (o+indices[i+1]) << 16);
      }
      if (i < nr_indices) {
         OUT_BATCH((o+indices[i]));
      }
      break;
   case PIPE_PRIM_LINE_LOOP:
      if (nr_indices >= 2) {
         for (i = 1; i < nr_indices; i++)
            OUT_BATCH((o+indices[i-1]) | (o+indices[i]) << 16);
         OUT_BATCH((o+indices[i-1]) | (o+indices[0]) << 16);
      }
      break;
   case PIPE_PRIM_QUADS:
      for (i = 0; i + 3 < nr_indices; i += 4) {
         OUT_BATCH((o+indices[i+0]) | (o+indices[i+1]) << 16);
         OUT_BATCH((o+indices[i+3]) | (o+indices[i+1]) << 16);
         OUT_BATCH((o+indices[i+2]) | (o+indices[i+3]) << 16);
      }
      break;
   case PIPE_PRIM_QUAD_STRIP:
      for (i = 0; i + 3 < nr_indices; i += 2) {
         OUT_BATCH((o+indices[i+0]) | (o+indices[i+1]) << 16);
         OUT_BATCH((o+indices[i+3]) | (o+indices[i+2]) << 16);
         OUT_BATCH((o+indices[i+0]) | (o+indices[i+3]) << 16);
      }
      break;
   default:
      assert(0);
      break;
   }
}

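/**
 * Calculate how many indices draw_generate_indices will emit for
 * nr_indices input indices of the given fallback type (0 is pass-through).
 */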
static unsigned
draw_calc_nr_indices(uint nr_indices, unsigned type)
{
   switch (type) {
   case 0:
      return nr_indices;
   case PIPE_PRIM_LINE_LOOP:
      if (nr_indices >= 2)
         return nr_indices * 2;
      else
         return 0;
   case PIPE_PRIM_QUADS:
      return (nr_indices / 4) * 6;
   case PIPE_PRIM_QUAD_STRIP:
      return ((nr_indices - 2) / 2) * 6;
   default:
      assert(0);
      return 0;
   }
}

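/**
 * Callback exported to the draw module.
 * Emits an indexed hardware primitive, translating the indices through
 * draw_generate_indices for fallback primitives.
 */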
static void
i915_vbuf_render_draw_elements(struct vbuf_render *render,
                               const ushort *indices,
                               uint nr_indices)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;
   unsigned save_nr_indices;

   save_nr_indices = nr_indices;

   nr_indices = draw_calc_nr_indices(nr_indices, i915_render->fallback);
   if (!nr_indices)
      return;

   i915_vbuf_ensure_index_bounds(render, i915_render->vbo_max_index);

   if (i915->dirty)
      i915_update_derived(i915);

   if (i915->hardware_dirty)
      i915_emit_hardware_state(i915);

   if (!BEGIN_BATCH(1 + (nr_indices + 1)/2)) {
      FLUSH_BATCH(NULL);

      /* Make sure state is re-emitted after a flush:
       */
      i915_emit_hardware_state(i915);
      i915->vbo_flushed = 1;

      if (!BEGIN_BATCH(1 + (nr_indices + 1)/2)) {
         assert(0);
         goto out;
      }
   }

   OUT_BATCH(_3DPRIMITIVE |
             PRIM_INDIRECT |
             i915_render->hwprim |
             PRIM_INDIRECT_ELTS |
             nr_indices);
   draw_generate_indices(render,
                         indices,
                         save_nr_indices,
                         i915_render->fallback);

out:
   return;
}

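/**
 * Callback exported to the draw module.
 * Advances sw_offset past the vertices used by the last draw so that
 * the space is not reused for the next set of vertices.
 */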
static void
i915_vbuf_render_release_vertices(struct vbuf_render *render)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);

   i915_render->vbo_sw_offset += i915_render->vbo_max_used;
   i915_render->vbo_max_used = 0;

   /*
    * Micro optimization: by calling update here, the offset change
    * will be picked up on the next pipe_context::draw_*.
    */
   i915_vbuf_update_vbo_state(render);
}

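/**
 * Callback exported to the draw module.
 * Unmaps and destroys the vbo and frees the renderer.
 */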
static void
i915_vbuf_render_destroy(struct vbuf_render *render)
{
   struct i915_vbuf_render *i915_render = i915_vbuf_render(render);
   struct i915_context *i915 = i915_render->i915;
   struct i915_winsys *iws = i915->iws;

   if (i915_render->vbo) {
      i915->vbo = NULL;
      iws->buffer_unmap(iws, i915_render->vbo);
      iws->buffer_destroy(iws, i915_render->vbo);
   }

   FREE(i915_render);
}

/**
 * Create a new primitive render.
 */
static struct vbuf_render *
i915_vbuf_render_create(struct i915_context *i915)
{
   struct i915_vbuf_render *i915_render = CALLOC_STRUCT(i915_vbuf_render);
   struct i915_winsys *iws = i915->iws;
   int i;

   i915_render->i915 = i915;

   i915_render->base.max_vertex_buffer_bytes = 16*4096;

   /* NOTE: it must be such that state and vertex indices fit in a single
    * batch buffer.
    */
   i915_render->base.max_indices = 16*1024;

   i915_render->base.get_vertex_info = i915_vbuf_render_get_vertex_info;
   i915_render->base.allocate_vertices = i915_vbuf_render_allocate_vertices;
   i915_render->base.map_vertices = i915_vbuf_render_map_vertices;
   i915_render->base.unmap_vertices = i915_vbuf_render_unmap_vertices;
   i915_render->base.set_primitive = i915_vbuf_render_set_primitive;
   i915_render->base.draw_elements = i915_vbuf_render_draw_elements;
   i915_render->base.draw_arrays = i915_vbuf_render_draw_arrays;
   i915_render->base.release_vertices = i915_vbuf_render_release_vertices;
   i915_render->base.destroy = i915_vbuf_render_destroy;

#ifndef VBUF_MAP_BUFFER
   i915_render->map_size = 0;
   i915_render->map_used_start = 0;
   i915_render->map_used_end = 0;
#endif

   i915_render->vbo = NULL;
   i915_render->vbo_ptr = NULL;
   i915_render->vbo_size = 0;
   i915_render->vbo_hw_offset = 0;
   i915_render->vbo_sw_offset = 0;
   i915_render->vbo_alloc_size = i915_render->base.max_vertex_buffer_bytes * 4;

#ifdef VBUF_USE_POOL
   i915_render->pool_used = FALSE;
   i915_render->pool_buffer_size = i915_render->vbo_alloc_size;
   i915_render->pool_fifo = u_fifo_create(6);
   for (i = 0; i < 6; i++)
      u_fifo_add(i915_render->pool_fifo,
                 iws->buffer_create(iws, i915_render->pool_buffer_size,
                                    I915_NEW_VERTEX));
#else
   (void)i;
   (void)iws;
#endif

   return &i915_render->base;
}

/**
 * Create a new primitive vbuf/render stage.
 */
struct draw_stage *i915_draw_vbuf_stage(struct i915_context *i915)
{
   struct vbuf_render *render;
   struct draw_stage *stage;

   render = i915_vbuf_render_create(i915);
   if(!render)
      return NULL;

   stage = draw_vbuf_stage(i915->draw, render);
   if(!stage) {
      render->destroy(render);
      return NULL;
   }
   /** TODO JB: this shouldn't be here */
   draw_set_render(i915->draw, render);

   return stage;
}
    775