
Lines Matching refs:sbuf

95 svga_buffer_destroy_hw_storage(struct svga_screen *ss, struct svga_buffer *sbuf)
99 assert(sbuf->map.count == 0);
100 assert(sbuf->hwbuf);
101 if (sbuf->hwbuf) {
102 sws->buffer_destroy(sws, sbuf->hwbuf);
103 sbuf->hwbuf = NULL;
116 struct svga_buffer *sbuf)
118 assert(!sbuf->user);
121 assert(sbuf->handle || !sbuf->dma.pending);
122 return svga_buffer_create_host_surface(ss, sbuf);
124 if (!sbuf->hwbuf) {
128 unsigned size = sbuf->b.b.width0;
130 sbuf->hwbuf = sws->buffer_create(sws, alignment, usage, size);
131 if (!sbuf->hwbuf)
134 assert(!sbuf->dma.pending);
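The two fragments above show the guest-side storage lifecycle: destruction asserts there are no outstanding maps and NULLs the pointer, while creation only runs when sbuf->hwbuf is absent and sizes the winsys buffer from width0. A minimal self-contained sketch of that guarded create/destroy pattern, with stand-in types replacing the real svga_winsys interface (all names here are illustrative):

#include <assert.h>
#include <stdlib.h>

/* Stand-in for the winsys buffer; illustrative only. */
struct hw_buffer { void *storage; };

struct buffer {
   struct hw_buffer *hwbuf;   /* guest-side (hardware) storage */
   unsigned map_count;        /* outstanding CPU mappings */
   unsigned width0;           /* buffer size in bytes */
};

/* Create guest storage only if it does not exist yet. */
static int
buffer_create_hw_storage(struct buffer *buf)
{
   if (!buf->hwbuf) {
      buf->hwbuf = malloc(sizeof *buf->hwbuf);
      if (!buf->hwbuf)
         return -1;                        /* out of memory */
      buf->hwbuf->storage = malloc(buf->width0);
      if (!buf->hwbuf->storage) {
         free(buf->hwbuf);
         buf->hwbuf = NULL;
         return -1;
      }
   }
   return 0;
}

/* Destroy guest storage; it must not be mapped, and the pointer is
 * cleared so a later create starts from a clean state. */
static void
buffer_destroy_hw_storage(struct buffer *buf)
{
   assert(buf->map_count == 0);
   if (buf->hwbuf) {
      free(buf->hwbuf->storage);
      free(buf->hwbuf);
      buf->hwbuf = NULL;
   }
}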
144 struct svga_buffer *sbuf)
146 assert(!sbuf->user);
148 if (!sbuf->handle) {
151 sbuf->key.flags = 0;
153 sbuf->key.format = SVGA3D_BUFFER;
154 if (sbuf->bind_flags & PIPE_BIND_VERTEX_BUFFER) {
155 sbuf->key.flags |= SVGA3D_SURFACE_HINT_VERTEXBUFFER;
156 sbuf->key.flags |= SVGA3D_SURFACE_BIND_VERTEX_BUFFER;
158 if (sbuf->bind_flags & PIPE_BIND_INDEX_BUFFER) {
159 sbuf->key.flags |= SVGA3D_SURFACE_HINT_INDEXBUFFER;
160 sbuf->key.flags |= SVGA3D_SURFACE_BIND_INDEX_BUFFER;
162 if (sbuf->bind_flags & PIPE_BIND_CONSTANT_BUFFER)
163 sbuf->key.flags |= SVGA3D_SURFACE_BIND_CONSTANT_BUFFER;
165 if (sbuf->bind_flags & PIPE_BIND_STREAM_OUTPUT)
166 sbuf->key.flags |= SVGA3D_SURFACE_BIND_STREAM_OUTPUT;
168 if (sbuf->bind_flags & PIPE_BIND_SAMPLER_VIEW)
169 sbuf->key.flags |= SVGA3D_SURFACE_BIND_SHADER_RESOURCE;
171 if (!sbuf->bind_flags && sbuf->b.b.usage == PIPE_USAGE_STAGING) {
176 sbuf->key.flags = SVGA3D_SURFACE_TRANSFER_FROM_BUFFER;
179 sbuf->key.size.width = sbuf->b.b.width0;
180 sbuf->key.size.height = 1;
181 sbuf->key.size.depth = 1;
183 sbuf->key.numFaces = 1;
184 sbuf->key.numMipLevels = 1;
185 sbuf->key.cachable = 1;
186 sbuf->key.arraySize = 1;
189 sbuf->b.b.width0);
191 sbuf->handle = svga_screen_surface_create(ss, sbuf->b.b.bind,
192 sbuf->b.b.usage,
193 &validated, &sbuf->key);
194 if (!sbuf->handle)
201 sbuf->dma.flags.discard = TRUE;
204 sbuf->handle, sbuf->b.b.width0);
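The surface-key setup above translates gallium bind flags into SVGA3D surface flags, pairing each legacy HINT flag with its guest-backed BIND counterpart, and special-cases bind-less staging buffers as transfer-only surfaces. A condensed sketch of that translation; the flag values and the helper are illustrative stand-ins, not the real svga3d definitions:

#include <stdint.h>

/* Illustrative stand-ins for PIPE_BIND_* and SVGA3D_SURFACE_* flags. */
enum pipe_bind {
   BIND_VERTEX_BUFFER   = 1 << 0,
   BIND_INDEX_BUFFER    = 1 << 1,
   BIND_CONSTANT_BUFFER = 1 << 2,
   BIND_STREAM_OUTPUT   = 1 << 3,
   BIND_SAMPLER_VIEW    = 1 << 4,
};

enum surface_flags {
   SURF_HINT_VERTEXBUFFER    = 1 << 0,
   SURF_BIND_VERTEX_BUFFER   = 1 << 1,
   SURF_HINT_INDEXBUFFER     = 1 << 2,
   SURF_BIND_INDEX_BUFFER    = 1 << 3,
   SURF_BIND_CONSTANT_BUFFER = 1 << 4,
   SURF_BIND_STREAM_OUTPUT   = 1 << 5,
   SURF_BIND_SHADER_RESOURCE = 1 << 6,
   SURF_TRANSFER_FROM_BUFFER = 1 << 7,
};

/* Map gallium bind flags to surface flags, mirroring the fragment:
 * each classic HINT flag is paired with its guest-backed BIND flag,
 * and a bind-less staging buffer becomes a transfer-only surface. */
static uint32_t
surface_flags_from_bind(unsigned bind, int is_staging)
{
   uint32_t flags = 0;

   if (bind & BIND_VERTEX_BUFFER)
      flags |= SURF_HINT_VERTEXBUFFER | SURF_BIND_VERTEX_BUFFER;
   if (bind & BIND_INDEX_BUFFER)
      flags |= SURF_HINT_INDEXBUFFER | SURF_BIND_INDEX_BUFFER;
   if (bind & BIND_CONSTANT_BUFFER)
      flags |= SURF_BIND_CONSTANT_BUFFER;
   if (bind & BIND_STREAM_OUTPUT)
      flags |= SURF_BIND_STREAM_OUTPUT;
   if (bind & BIND_SAMPLER_VIEW)
      flags |= SURF_BIND_SHADER_RESOURCE;

   if (!bind && is_staging)
      flags = SURF_TRANSFER_FROM_BUFFER;   /* transfer-only surface */

   return flags;
}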
213 struct svga_buffer *sbuf)
215 if (sbuf->handle) {
217 sbuf->handle, sbuf->b.b.width0);
218 svga_screen_surface_destroy(ss, &sbuf->key, &sbuf->handle);
231 struct svga_buffer *sbuf)
236 const uint32 numBoxes = sbuf->map.num_ranges;
242 assert(sbuf->dma.updates == NULL);
244 if (sbuf->dma.flags.discard) {
263 swc->surface_relocation(swc, &invalidate_cmd->image.sid, NULL, sbuf->handle,
297 swc->surface_relocation(swc, &update_cmd->image.sid, NULL, sbuf->handle,
305 sbuf->dma.updates = whole_update_cmd;
313 memcpy(whole_update_cmd, sbuf->dma.updates, sizeof(*whole_update_cmd));
316 sbuf->handle,
321 sbuf->dma.svga = svga;
323 pipe_resource_reference(&dummy, &sbuf->b.b);
327 sbuf->dma.flags.discard = FALSE;
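For guest-backed (GB) surfaces the upload is expressed as commands: an optional invalidate when the previous contents are being discarded, followed by one templated UPDATE_GB_IMAGE record per dirty range, whose boxes are filled in later at flush time. A simplified sketch of reserving and templating those per-range records; the command struct and allocation are stand-ins for the real command-buffer reservation:

#include <stdlib.h>
#include <string.h>

/* Stand-in for one UPDATE_GB_IMAGE command; illustrative only. */
struct update_cmd {
   void *sid;        /* surface handle relocation */
   unsigned x, w;    /* 1D box, patched at flush time */
};

struct upload_state {
   struct update_cmd *updates;   /* one record per dirty range */
   unsigned num_ranges;
   int discard;                  /* previous contents invalid? */
};

/* Reserve one templated update record per dirty range.  Each box is
 * left empty here; svga_buffer_upload_flush() fills it in. */
static int
emit_gb_update_commands(struct upload_state *st, void *surface_handle)
{
   struct update_cmd tmpl = { surface_handle, 0, 0 };
   unsigned i;

   if (st->discard) {
      /* The real code first emits an INVALIDATE_GB_IMAGE command so
       * the host need not preserve the old surface contents. */
   }

   st->updates = calloc(st->num_ranges, sizeof *st->updates);
   if (!st->updates)
      return -1;
   for (i = 0; i < st->num_ranges; ++i)
      memcpy(&st->updates[i], &tmpl, sizeof tmpl);

   st->discard = 0;   /* the discard has now been committed */
   return 0;
}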
342 struct svga_buffer *sbuf)
345 struct svga_winsys_buffer *guest = sbuf->hwbuf;
346 struct svga_winsys_surface *host = sbuf->handle;
349 const uint32 numBoxes = sbuf->map.num_ranges;
389 sbuf->dma.boxes = (SVGA3dCopyBox *)&cmd[1];
390 sbuf->dma.svga = svga;
394 pipe_resource_reference(&dummy, &sbuf->b.b);
398 pSuffix->maximumOffset = sbuf->b.b.width0;
399 pSuffix->flags = sbuf->dma.flags;
404 sbuf->dma.flags.discard = FALSE;
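On the non-GB (host-backed) path the upload is a single SURFACE_DMA command from the guest winsys buffer to the host surface, with one copy box per dirty range appended after the command header and a suffix carrying maximumOffset and the discard/unsynchronized flags. A sketch of laying out that variable-length command; the struct layout and allocation are illustrative, not the real winsys reservation:

#include <stdlib.h>

/* Illustrative layout: header, then num_boxes copy boxes, then suffix. */
struct copy_box   { unsigned x, w, srcx; };
struct dma_suffix { unsigned maximum_offset; unsigned flags; };
struct dma_cmd    { void *guest, *host; unsigned num_boxes; };

struct dma_state {
   struct copy_box *boxes;   /* filled in at flush time */
   unsigned discard;
};

/* Reserve header + boxes + suffix in one contiguous allocation, the
 * same shape the real command reservation produces. */
static struct dma_cmd *
emit_surface_dma(struct dma_state *st, void *guest, void *host,
                 unsigned num_boxes, unsigned buffer_size, unsigned flags)
{
   struct dma_cmd *cmd = malloc(sizeof *cmd +
                                num_boxes * sizeof(struct copy_box) +
                                sizeof(struct dma_suffix));
   struct dma_suffix *suffix;

   if (!cmd)
      return NULL;
   cmd->guest = guest;
   cmd->host = host;
   cmd->num_boxes = num_boxes;

   st->boxes = (struct copy_box *)&cmd[1];    /* boxes follow the header */
   suffix = (struct dma_suffix *)&st->boxes[num_boxes];
   suffix->maximum_offset = buffer_size;      /* sbuf->b.b.width0 */
   suffix->flags = flags;

   st->discard = 0;
   return cmd;
}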
416 svga_buffer_upload_command(struct svga_context *svga, struct svga_buffer *sbuf)
419 return svga_buffer_upload_gb_command(svga, sbuf);
421 return svga_buffer_upload_hb_command(svga, sbuf);
432 struct svga_buffer *sbuf)
437 if (!sbuf->dma.pending) {
442 assert(sbuf->handle);
443 assert(sbuf->map.num_ranges);
444 assert(sbuf->dma.svga == svga);
450 struct svga_3d_update_gb_image *update = sbuf->dma.updates;
453 for (i = 0; i < sbuf->map.num_ranges; ++i, ++update) {
457 sbuf->map.ranges[i].start, sbuf->map.ranges[i].end);
459 box->x = sbuf->map.ranges[i].start;
462 box->w = sbuf->map.ranges[i].end - sbuf->map.ranges[i].start;
466 assert(box->x <= sbuf->b.b.width0);
467 assert(box->x + box->w <= sbuf->b.b.width0);
474 assert(sbuf->hwbuf);
475 assert(sbuf->dma.boxes);
476 SVGA_DBG(DEBUG_DMA, "dma to sid %p\n", sbuf->handle);
478 for (i = 0; i < sbuf->map.num_ranges; ++i) {
479 SVGA3dCopyBox *box = sbuf->dma.boxes + i;
482 sbuf->map.ranges[i].start, sbuf->map.ranges[i].end);
484 box->x = sbuf->map.ranges[i].start;
487 box->w = sbuf->map.ranges[i].end - sbuf->map.ranges[i].start;
490 box->srcx = sbuf->map.ranges[i].start;
494 assert(box->x <= sbuf->b.b.width0);
495 assert(box->x + box->w <= sbuf->b.b.width0);
502 /* Reset sbuf for next use/upload */
504 sbuf->map.num_ranges = 0;
506 assert(sbuf->head.prev && sbuf->head.next);
507 LIST_DEL(&sbuf->head); /* remove from svga->dirty_buffers list */
509 sbuf->head.next = sbuf->head.prev = NULL;
511 sbuf->dma.pending = FALSE;
512 sbuf->dma.flags.discard = FALSE;
513 sbuf->dma.flags.unsynchronized = FALSE;
515 sbuf->dma.svga = NULL;
516 sbuf->dma.boxes = NULL;
517 sbuf->dma.updates = NULL;
520 dummy = &sbuf->b.b;
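svga_buffer_upload_flush() is where the boxes reserved by either upload path finally get their coordinates: for each accumulated range it writes a 1D box with x = start and w = end - start (plus srcx = start on the DMA path), then resets the per-buffer upload state and unlinks the buffer from the context's dirty list. A sketch of the box fill-in and state reset, using simplified types in the spirit of the earlier sketches:

#include <assert.h>

struct range { unsigned start, end; };
struct box1d { unsigned x, w, srcx; };

/* Patch one reserved box per dirty range, then reset the bookkeeping
 * so the buffer can accumulate a fresh set of ranges. */
static void
upload_flush(struct box1d *boxes, const struct range *ranges,
             unsigned *num_ranges, unsigned buffer_size, int *pending)
{
   unsigned i;

   if (!*pending)
      return;

   for (i = 0; i < *num_ranges; ++i) {
      boxes[i].x = ranges[i].start;
      boxes[i].w = ranges[i].end - ranges[i].start;
      boxes[i].srcx = ranges[i].start;   /* used on the DMA path only */
      assert(boxes[i].x + boxes[i].w <= buffer_size);
   }

   /* Reset for the next use/upload; the real code also unlinks the
    * buffer from svga->dirty_buffers and drops its extra reference. */
   *num_ranges = 0;
   *pending = 0;
}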
536 svga_buffer_add_range(struct svga_buffer *sbuf, unsigned start, unsigned end)
544 if (sbuf->map.num_ranges < SVGA_BUFFER_MAX_RANGES) {
545 nearest_range = sbuf->map.num_ranges;
555 for (i = 0; i < sbuf->map.num_ranges; ++i) {
556 const int left_dist = start - sbuf->map.ranges[i].end;
557 const int right_dist = sbuf->map.ranges[i].start - end;
569 sbuf->map.ranges[i].start = MIN2(sbuf->map.ranges[i].start, start);
570 sbuf->map.ranges[i].end = MAX2(sbuf->map.ranges[i].end, end);
589 svga_buffer_upload_flush(sbuf->dma.svga, sbuf);
591 assert(!sbuf->dma.pending);
592 assert(!sbuf->dma.svga);
593 assert(!sbuf->dma.boxes);
595 if (sbuf->map.num_ranges < SVGA_BUFFER_MAX_RANGES) {
600 sbuf->map.ranges[sbuf->map.num_ranges].start = start;
601 sbuf->map.ranges[sbuf->map.num_ranges].end = end;
602 ++sbuf->map.num_ranges;
612 assert(nearest_range < sbuf->map.num_ranges);
613 sbuf->map.ranges[nearest_range].start =
614 MIN2(sbuf->map.ranges[nearest_range].start, start);
615 sbuf->map.ranges[nearest_range].end =
616 MAX2(sbuf->map.ranges[nearest_range].end, end);
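svga_buffer_add_range() coalesces dirty intervals: an incoming write is merged into any existing range it touches, appended as a new range while there is room, and once the fixed-size array is full it is folded into the nearest range (after flushing any pending upload, since widening a range would grow a box the hardware may already reference). A self-contained sketch of that policy, assuming a fixed MAX_RANGES standing in for SVGA_BUFFER_MAX_RANGES:

#include <limits.h>

#define MAX_RANGES 32          /* stands in for SVGA_BUFFER_MAX_RANGES */
#define MIN2(a, b) ((a) < (b) ? (a) : (b))
#define MAX2(a, b) ((a) > (b) ? (a) : (b))

struct range { unsigned start, end; };

struct range_set {
   struct range ranges[MAX_RANGES];
   unsigned num_ranges;
};

/* Record that [start, end) was written. */
static void
add_range(struct range_set *set, unsigned start, unsigned end)
{
   unsigned nearest = 0;
   int nearest_dist = INT_MAX;
   unsigned i;

   for (i = 0; i < set->num_ranges; ++i) {
      const int left_dist  = (int)start - (int)set->ranges[i].end;
      const int right_dist = (int)set->ranges[i].start - (int)end;
      const int dist = MAX2(left_dist, right_dist);

      if (dist <= 0) {
         /* Overlapping or adjacent: grow this range in place. */
         set->ranges[i].start = MIN2(set->ranges[i].start, start);
         set->ranges[i].end   = MAX2(set->ranges[i].end, end);
         return;
      }
      if (dist < nearest_dist) {
         nearest = i;
         nearest_dist = dist;
      }
   }

   if (set->num_ranges < MAX_RANGES) {
      /* Room left: start a new independent range. */
      set->ranges[set->num_ranges].start = start;
      set->ranges[set->num_ranges].end = end;
      ++set->num_ranges;
      return;
   }

   /* Array full: fold into the nearest range, over-estimating the
    * dirty region rather than losing it.  The real driver flushes any
    * pending upload first, since this widens an already-reserved box. */
   set->ranges[nearest].start = MIN2(set->ranges[nearest].start, start);
   set->ranges[nearest].end   = MAX2(set->ranges[nearest].end, end);
}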
626 svga_buffer_update_hw(struct svga_context *svga, struct svga_buffer *sbuf)
628 assert(!sbuf->user);
629 if (!svga_buffer_has_hw_storage(sbuf)) {
630 struct svga_screen *ss = svga_screen(sbuf->b.b.screen);
636 assert(sbuf->swbuf);
637 if (!sbuf->swbuf)
640 ret = svga_buffer_create_hw_storage(svga_screen(sbuf->b.b.screen), sbuf);
645 map = svga_buffer_hw_storage_map(svga, sbuf, PIPE_TRANSFER_WRITE, &retry);
650 svga_buffer_destroy_hw_storage(ss, sbuf);
655 for (i = 0; i < sbuf->map.num_ranges; i++) {
656 unsigned start = sbuf->map.ranges[i].start;
657 unsigned len = sbuf->map.ranges[i].end - start;
658 memcpy((uint8_t *) map + start, (uint8_t *) sbuf->swbuf + start, len);
661 svga_buffer_hw_storage_unmap(svga, sbuf);
664 assert(sbuf->map.count == 0);
665 if (sbuf->map.count == 0) {
666 if (sbuf->user)
667 sbuf->user = FALSE;
669 align_free(sbuf->swbuf);
670 sbuf->swbuf = NULL;
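svga_buffer_update_hw() migrates a buffer that so far lived only in malloc'ed memory (swbuf) into hardware storage: it creates the winsys buffer, maps it for writing, copies exactly the dirty ranges across, and frees the software shadow once no mappings remain. A sketch of the copy step; the types are illustrative and carried over from the sketches above:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct range { unsigned start, end; };

/* Copy only the dirty ranges from the software shadow into the freshly
 * mapped hardware storage, then retire the shadow. */
static void
migrate_to_hw(void *hw_map, void **swbuf,
              const struct range *ranges, unsigned num_ranges)
{
   unsigned i;

   for (i = 0; i < num_ranges; ++i) {
      const unsigned start = ranges[i].start;
      const unsigned len = ranges[i].end - start;
      memcpy((uint8_t *)hw_map + start, (uint8_t *)*swbuf + start, len);
   }

   /* Once nothing maps the shadow any more it can be released; the
    * real code uses align_free() because swbuf was align_malloc'ed. */
   free(*swbuf);
   *swbuf = NULL;
}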
691 struct svga_buffer *sbuf)
698 assert(sbuf->map.num_ranges);
699 assert(!sbuf->dma.pending);
702 SVGA_DBG(DEBUG_DMA, "dma to sid %p\n", sbuf->handle);
704 for (i = 0; i < sbuf->map.num_ranges; ++i) {
705 const struct svga_buffer_range *range = &sbuf->map.ranges[i];
733 memcpy(map, (const char *) sbuf->swbuf + offset, size);
738 hwbuf, sbuf->handle,
740 size, 0, offset, sbuf->dma.flags);
744 hwbuf, sbuf->handle,
746 size, 0, offset, sbuf->dma.flags);
750 sbuf->dma.flags.discard = FALSE;
758 sbuf->map.num_ranges = 0;
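svga_buffer_upload_piecewise() is the fallback when a guest buffer covering the whole resource cannot be obtained: each dirty range is walked in bounded chunks, and every chunk is staged through a small winsys buffer and sent with its own DMA carrying the same flags. A sketch of the chunking loop; stage_and_dma is a hypothetical helper standing in for the allocate/memcpy/DMA sequence in the fragment:

#include <stdlib.h>
#include <string.h>

struct range { unsigned start, end; };

/* Hypothetical helper: in the driver this allocates a small winsys
 * buffer, memcpy's the piece out of swbuf, and emits one SURFACE_DMA. */
static int
stage_and_dma(const void *src, unsigned offset, unsigned size)
{
   (void)src; (void)offset; (void)size;
   return 0;   /* stub */
}

/* Upload one dirty range in bounded chunks. */
static int
upload_range_piecewise(const void *swbuf, const struct range *r,
                       unsigned max_chunk)
{
   unsigned offset = r->start;

   while (offset < r->end) {
      unsigned size = r->end - offset;
      if (size > max_chunk)
         size = max_chunk;

      if (stage_and_dma((const char *)swbuf + offset, offset, size) != 0)
         return -1;

      offset += size;
   }
   return 0;
}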
776 struct svga_buffer *sbuf;
782 sbuf = svga_buffer(buf);
784 assert(!sbuf->user);
786 if (!sbuf->handle) {
787 /* This call will set sbuf->handle */
789 ret = svga_buffer_update_hw(svga, sbuf);
791 ret = svga_buffer_create_host_surface(ss, sbuf);
797 assert(sbuf->handle);
799 if (sbuf->map.num_ranges) {
800 if (!sbuf->dma.pending) {
804 ret = svga_buffer_update_hw(svga, sbuf);
807 ret = svga_buffer_upload_command(svga, sbuf);
810 ret = svga_buffer_upload_command(svga, sbuf);
814 sbuf->dma.pending = TRUE;
815 assert(!sbuf->head.prev && !sbuf->head.next);
816 LIST_ADDTAIL(&sbuf->head, &svga->dirty_buffers);
824 ret = svga_buffer_upload_piecewise(ss, svga, sbuf);
833 sbuf->map.num_ranges = 0;
840 assert(sbuf->dma.svga == svga);
844 assert(sbuf->map.num_ranges == 0 || sbuf->dma.pending);
846 return sbuf->handle;
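svga_buffer_handle() is the validation entry point called before a draw: it guarantees a host surface exists, turns any accumulated dirty ranges into a pending upload, and queues the buffer on the context's dirty list so the reserved boxes get patched at flush time. A condensed sketch of that decision flow; the types and helpers are illustrative stubs, not the driver's API:

struct buf {
   void *handle;          /* host surface */
   unsigned num_ranges;   /* accumulated dirty ranges */
   int dma_pending;       /* upload command already reserved? */
};

/* Hypothetical stubs standing in for the calls seen in the fragment. */
static int  ensure_host_surface(struct buf *b) { b->handle = b; return 0; }
static int  emit_upload_command(struct buf *b) { (void)b; return 0; }
static int  upload_piecewise(struct buf *b)    { (void)b; return 0; }
static void add_to_dirty_list(struct buf *b)   { (void)b; }

static void *
buffer_handle(struct buf *b)
{
   if (!b->handle && ensure_host_surface(b) != 0)
      return NULL;

   if (b->num_ranges && !b->dma_pending) {
      if (emit_upload_command(b) == 0) {
         /* Boxes are reserved; upload_flush() will fill them in. */
         b->dma_pending = 1;
         add_to_dirty_list(b);
      } else {
         /* Could not reserve one big command: fall back to chunked
          * staging uploads, after which nothing is left pending. */
         upload_piecewise(b);
         b->num_ranges = 0;
      }
   }
   return b->handle;
}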
861 struct svga_buffer *sbuf = LIST_ENTRY(struct svga_buffer, curr, head);
863 assert(p_atomic_read(&sbuf->b.b.reference.count) != 0);
864 assert(sbuf->dma.pending);
866 svga_buffer_upload_flush(svga, sbuf);
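Finally, at context flush the driver walks svga->dirty_buffers and calls svga_buffer_upload_flush() on every entry, which is what actually writes the box coordinates and clears the pending state. A minimal sketch of that sweep over an intrusive list; the list node and macros stand in for the driver's LIST_* helpers:

#include <assert.h>
#include <stddef.h>

/* Stand-in intrusive doubly linked list node. */
struct list_node { struct list_node *prev, *next; };

struct buf {
   struct list_node head;   /* links the buffer into dirty_buffers */
   int dma_pending;
};

#define BUF_FROM_NODE(n) \
   ((struct buf *)((char *)(n) - offsetof(struct buf, head)))

/* Simplified flush: the real svga_buffer_upload_flush() also patches
 * the reserved boxes; here we just unlink and clear the pending flag. */
static void
upload_flush(struct buf *b)
{
   b->head.prev->next = b->head.next;
   b->head.next->prev = b->head.prev;
   b->head.next = b->head.prev = NULL;
   b->dma_pending = 0;
}

/* Flush every buffer with a pending upload; upload_flush() unlinks the
 * entry, so grab `next` before flushing each one. */
static void
flush_dirty_buffers(struct list_node *dirty)
{
   struct list_node *curr = dirty->next;

   while (curr != dirty) {
      struct list_node *next = curr->next;
      struct buf *b = BUF_FROM_NODE(curr);

      assert(b->dma_pending);
      upload_flush(b);
      curr = next;
   }
}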