Lines matching refs: sctx (radeonsi)

136 static bool si_ce_upload(struct si_context *sctx, unsigned ce_offset, unsigned size,
140 u_suballocator_alloc(sctx->ce_suballocator, size, 64, out_offset,
147 radeon_emit(sctx->ce_ib, PKT3(PKT3_DUMP_CONST_RAM, 3, 0));
148 radeon_emit(sctx->ce_ib, ce_offset);
149 radeon_emit(sctx->ce_ib, size / 4);
150 radeon_emit(sctx->ce_ib, va);
151 radeon_emit(sctx->ce_ib, va >> 32);
153 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx, *out_buf,
156 sctx->ce_need_synchronization = true;
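
The fragments above (file lines 136-156) give si_ce_upload nearly in full: it suballocates space for a descriptor dump, asks the constant engine to copy CE RAM at ce_offset into that buffer with a DUMP_CONST_RAM packet, and flags that the DE must later synchronize with the CE. A hedged reconstruction; the early-out and the va computation are inferred, not shown in the match list:

static bool si_ce_upload(struct si_context *sctx, unsigned ce_offset,
                         unsigned size, unsigned *out_offset,
                         struct r600_resource **out_buf)
{
    uint64_t va;

    u_suballocator_alloc(sctx->ce_suballocator, size, 64, out_offset,
                         (struct pipe_resource **)out_buf);
    if (!*out_buf)                       /* inferred error check */
        return false;

    va = (*out_buf)->gpu_address + *out_offset;

    /* Dump 'size' bytes of CE RAM, starting at ce_offset, into the
     * suballocated buffer. The size field is in dwords. */
    radeon_emit(sctx->ce_ib, PKT3(PKT3_DUMP_CONST_RAM, 3, 0));
    radeon_emit(sctx->ce_ib, ce_offset);
    radeon_emit(sctx->ce_ib, size / 4);
    radeon_emit(sctx->ce_ib, va);
    radeon_emit(sctx->ce_ib, va >> 32);

    radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx, *out_buf,
                              RADEON_USAGE_READWRITE,
                              RADEON_PRIO_DESCRIPTORS);

    sctx->ce_need_synchronization = true;
    return true;
}
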
160 static void si_ce_reinitialize_descriptors(struct si_context *sctx,
167 struct radeon_winsys_cs *ib = sctx->ce_preamble_ib;
170 ib = sctx->ce_ib;
180 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx, desc->buffer,
186 void si_ce_reinitialize_all_descriptors(struct si_context *sctx)
191 si_ce_reinitialize_descriptors(sctx, &sctx->descriptors[i]);
202 static bool si_upload_descriptors(struct si_context *sctx,
211 if (sctx->ce_ib) {
215 si_ce_reinitialize_descriptors(sctx, desc);
225 radeon_emit(sctx->ce_ib,
227 radeon_emit(sctx->ce_ib, desc->ce_offset + begin * 4);
228 radeon_emit_array(sctx->ce_ib, list + begin, count);
231 if (!si_ce_upload(sctx, desc->ce_offset, list_size,
237 u_upload_alloc(sctx->b.uploader, 0, list_size, 256,
246 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx, desc->buffer,
252 si_mark_atom_dirty(sctx, atom);
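
Lines 202-252 outline both paths of si_upload_descriptors. With a CE IB, only dirty descriptor ranges are rewritten into CE RAM (WRITE_CONST_RAM) and the list is then dumped to memory through si_ce_upload; without CE, the list is re-uploaded wholesale via u_upload_alloc. A condensed sketch under those assumptions; the dirty-range scan and the little-endian copy are inferred from the usual gallium helpers:

static bool si_upload_descriptors(struct si_context *sctx,
                                  struct si_descriptors *desc,
                                  struct r600_atom *atom)
{
    unsigned list_size = desc->num_elements * desc->element_dw_size * 4;

    if (!desc->dirty_mask)
        return true;

    if (sctx->ce_ib) {
        const uint32_t *list = (const uint32_t *)desc->list;

        if (desc->ce_ram_dirty)
            si_ce_reinitialize_descriptors(sctx, desc);

        while (desc->dirty_mask) {
            int begin, count;
            u_bit_scan_consecutive_range64(&desc->dirty_mask,
                                           &begin, &count);

            begin *= desc->element_dw_size;
            count *= desc->element_dw_size;

            /* Patch only the dirty dword range in CE RAM. */
            radeon_emit(sctx->ce_ib,
                        PKT3(PKT3_WRITE_CONST_RAM, count, 0));
            radeon_emit(sctx->ce_ib, desc->ce_offset + begin * 4);
            radeon_emit_array(sctx->ce_ib, list + begin, count);
        }

        if (!si_ce_upload(sctx, desc->ce_offset, list_size,
                          &desc->buffer_offset, &desc->buffer))
            return false;
    } else {
        void *ptr;

        u_upload_alloc(sctx->b.uploader, 0, list_size, 256,
                       &desc->buffer_offset,
                       (struct pipe_resource **)&desc->buffer, &ptr);
        if (!desc->buffer)
            return false; /* skip the draw call */

        util_memcpy_cpu_to_le32(ptr, desc->list, list_size);

        radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx, desc->buffer,
                                  RADEON_USAGE_READ,
                                  RADEON_PRIO_DESCRIPTORS);
    }
    desc->dirty_mask = 0;

    if (atom)
        si_mark_atom_dirty(sctx, atom);
    return true;
}
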
258 si_descriptors_begin_new_cs(struct si_context *sctx, struct si_descriptors *desc)
265 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx, desc->buffer,
279 si_sampler_descriptors(struct si_context *sctx, unsigned shader)
281 return &sctx->descriptors[si_sampler_descriptors_idx(shader)];
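
si_sampler_descriptors (line 279) is one of several thin accessors that map a shader stage to its slot in the flat sctx->descriptors[] array. The index helper is not in the match list; the sketch below assumes the usual SI_DESCS_FIRST_SHADER layout with SI_NUM_SHADER_DESCS entries per stage:

static unsigned si_sampler_descriptors_idx(unsigned shader)
{
    return SI_DESCS_FIRST_SHADER + shader * SI_NUM_SHADER_DESCS +
           SI_SHADER_DESCS_SAMPLERS;
}

static struct si_descriptors *
si_sampler_descriptors(struct si_context *sctx, unsigned shader)
{
    return &sctx->descriptors[si_sampler_descriptors_idx(shader)];
}

si_image_descriptors (line 597), si_const_buffer_descriptors (line 1058), and si_shader_buffer_descriptors (line 1183) follow the same pattern with their own SI_SHADER_DESCS_* offsets.
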
293 static void si_sampler_view_add_buffer(struct si_context *sctx,
316 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
326 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
334 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
340 static void si_sampler_views_begin_new_cs(struct si_context *sctx,
350 si_sampler_view_add_buffer(sctx, sview->base.texture,
415 static void si_set_sampler_view(struct si_context *sctx,
420 struct si_sampler_views *views = &sctx->samplers[shader].views;
422 struct si_descriptors *descs = si_sampler_descriptors(sctx, shader);
473 si_sampler_view_add_buffer(sctx, view->texture,
490 sctx->descriptors_dirty |= 1u << si_sampler_descriptors_idx(shader);
499 static void si_update_compressed_tex_shader_mask(struct si_context *sctx,
502 struct si_textures_info *samplers = &sctx->samplers[shader];
507 sctx->images[shader].compressed_colortex_mask)
508 sctx->compressed_tex_shader_mask |= shader_bit;
510 sctx->compressed_tex_shader_mask &= ~shader_bit;
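
Lines 499-510 show si_update_compressed_tex_shader_mask almost in full: the per-stage bit in sctx->compressed_tex_shader_mask stays set while any bound sampler or image still references a compressed texture. A sketch; the first two mask names are assumptions consistent with the si_textures_info fields used elsewhere in the listing:

static void si_update_compressed_tex_shader_mask(struct si_context *sctx,
                                                 unsigned shader)
{
    struct si_textures_info *samplers = &sctx->samplers[shader];
    unsigned shader_bit = 1 << shader;

    if (samplers->depth_texture_mask ||            /* assumed name */
        samplers->compressed_colortex_mask ||      /* assumed name */
        sctx->images[shader].compressed_colortex_mask)
        sctx->compressed_tex_shader_mask |= shader_bit;
    else
        sctx->compressed_tex_shader_mask &= ~shader_bit;
}
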
518 struct si_context *sctx = (struct si_context *)ctx;
519 struct si_textures_info *samplers = &sctx->samplers[shader];
531 si_set_sampler_view(sctx, shader, slot, NULL, false);
535 si_set_sampler_view(sctx, shader, slot, views[i], false);
556 sctx->need_check_render_feedback = true;
563 si_update_compressed_tex_shader_mask(sctx, shader);
597 si_image_descriptors(struct si_context *sctx, unsigned shader)
599 return &sctx->descriptors[si_image_descriptors_idx(shader)];
615 si_image_views_begin_new_cs(struct si_context *sctx, struct si_images_info *images)
626 si_sampler_view_add_buffer(sctx, view->resource,
816 struct si_context *sctx = (struct si_context *)ctx;
817 struct si_textures_info *samplers = &sctx->samplers[shader];
818 struct si_descriptors *desc = si_sampler_descriptors(sctx, shader);
848 sctx->descriptors_dirty |= 1u << si_sampler_descriptors_idx(shader);
882 static void si_buffer_resources_begin_new_cs(struct si_context *sctx,
891 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx,
920 static void si_vertex_buffers_begin_new_cs(struct si_context *sctx)
922 struct si_descriptors *desc = &sctx->vertex_buffers;
923 int count = sctx->vertex_elements ? sctx->vertex_elements->count : 0;
927 int vb = sctx->vertex_elements->elements[i].vertex_buffer_index;
929 if (vb >= ARRAY_SIZE(sctx->vertex_buffer))
931 if (!sctx->vertex_buffer[vb].buffer)
934 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx,
935 (struct r600_resource*)sctx->vertex_buffer[vb].buffer,
941 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx,
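
Lines 920-943 give si_vertex_buffers_begin_new_cs almost verbatim: after a context flush, every referenced vertex buffer, plus the descriptor buffer itself, must be re-added to the new command stream's buffer list. A reconstruction; the usage/priority arguments are assumptions:

static void si_vertex_buffers_begin_new_cs(struct si_context *sctx)
{
    struct si_descriptors *desc = &sctx->vertex_buffers;
    int count = sctx->vertex_elements ? sctx->vertex_elements->count : 0;
    int i;

    for (i = 0; i < count; i++) {
        int vb = sctx->vertex_elements->elements[i].vertex_buffer_index;

        if (vb >= ARRAY_SIZE(sctx->vertex_buffer))
            continue;
        if (!sctx->vertex_buffer[vb].buffer)
            continue;

        radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx,
                                  (struct r600_resource *)sctx->vertex_buffer[vb].buffer,
                                  RADEON_USAGE_READ,
                                  RADEON_PRIO_VERTEX_BUFFER);
    }

    if (!desc->buffer)
        return;
    radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx, desc->buffer,
                              RADEON_USAGE_READ, RADEON_PRIO_DESCRIPTORS);
}
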
946 bool si_upload_vertex_buffer_descriptors(struct si_context *sctx)
948 struct si_vertex_element *velems = sctx->vertex_elements;
949 struct si_descriptors *desc = &sctx->vertex_buffers;
954 if (!sctx->vertex_buffers_dirty || !count || !velems)
964 u_upload_alloc(sctx->b.uploader, 0, count * 16, 256, &desc->buffer_offset,
969 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx,
983 vb = &sctx->vertex_buffer[vbo_index];
998 if (sctx->b.chip_class <= CIK && vb->stride) {
1031 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx,
1041 si_mark_atom_dirty(sctx, &sctx->shader_userdata.atom);
1042 sctx->vertex_buffers_dirty = false;
1043 sctx->vertex_buffer_pointer_dirty = true;
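
Inside si_upload_vertex_buffer_descriptors (lines 946-1043), the chip-class check at line 998 is the notable detail: on SI/CIK, a buffer resource with a non-zero stride interprets num_records as a count of stride-sized records, so the last, possibly partial, vertex must be rounded in by hand. A hedged sketch of the per-buffer dword packing; ptr, offset, and the velems->format_size/rsrc_word3 caches are assumed names:

    struct pipe_vertex_buffer *vb = &sctx->vertex_buffer[vbo_index];
    unsigned offset = vb->buffer_offset + velems->elements[i].src_offset;
    uint64_t va = ((struct r600_resource *)vb->buffer)->gpu_address + offset;
    uint32_t *desc = &ptr[i * 4];    /* 4 dwords per buffer resource */

    desc[0] = va;
    desc[1] = S_008F04_BASE_ADDRESS_HI(va >> 32) |
              S_008F04_STRIDE(vb->stride);

    if (sctx->b.chip_class <= CIK && vb->stride) {
        /* num_records counts stride-sized records; round up so the
         * last, possibly partial, vertex is still fetchable. */
        desc[2] = (vb->buffer->width0 - offset -
                   velems->format_size[i]) / vb->stride + 1;
    } else {
        desc[2] = vb->buffer->width0 - offset; /* size in bytes */
    }
    desc[3] = velems->rsrc_word3[i];
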
1058 si_const_buffer_descriptors(struct si_context *sctx, unsigned shader)
1060 return &sctx->descriptors[si_const_buffer_descriptors_idx(shader)];
1063 void si_upload_const_buffer(struct si_context *sctx, struct r600_resource **rbuffer,
1068 u_upload_alloc(sctx->b.uploader, 0, size, 256, const_offset,
1074 static void si_set_constant_buffer(struct si_context *sctx,
1079 struct si_descriptors *descs = &sctx->descriptors[descriptors_idx];
1085 if (sctx->b.chip_class == CIK &&
1087 input = &sctx->null_const_buf;
1097 sctx,
1102 si_set_constant_buffer(sctx, buffers, descriptors_idx, slot, NULL);
1127 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
1139 sctx->descriptors_dirty |= 1u << descriptors_idx;
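
The chip-class check at line 1085 inside si_set_constant_buffer is a known CIK quirk: S_BUFFER_LOAD misbehaves when given a NULL buffer descriptor, so unbinding a constant buffer substitutes a dummy allocation instead. The shape of the check, as a sketch:

    /* CIK cannot unbind a constant buffer: S_BUFFER_LOAD is buggy
     * with a NULL descriptor, so bind a dummy buffer instead. */
    if (sctx->b.chip_class == CIK &&
        (!input || (!input->buffer && !input->user_buffer)))
        input = &sctx->null_const_buf;
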
1142 void si_set_rw_buffer(struct si_context *sctx,
1145 si_set_constant_buffer(sctx, &sctx->rw_buffers,
1153 struct si_context *sctx = (struct si_context *)ctx;
1158 si_set_constant_buffer(sctx, &sctx->const_buffers[shader],
1163 void si_get_pipe_constant_buffer(struct si_context *sctx, uint shader,
1168 &sctx->const_buffers[shader],
1169 si_const_buffer_descriptors(sctx, shader),
1183 si_shader_buffer_descriptors(struct si_context *sctx,
1186 return &sctx->descriptors[si_shader_buffer_descriptors_idx(shader)];
1194 struct si_context *sctx = (struct si_context *)ctx;
1195 struct si_buffer_resources *buffers = &sctx->shader_buffers[shader];
1196 struct si_descriptors *descs = si_shader_buffer_descriptors(sctx, shader);
1213 sctx->descriptors_dirty |=
1233 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx, buf,
1240 sctx->descriptors_dirty |=
1248 void si_get_shader_buffers(struct si_context *sctx, uint shader,
1252 struct si_buffer_resources *buffers = &sctx->shader_buffers[shader];
1253 struct si_descriptors *descs = si_shader_buffer_descriptors(sctx, shader);
1271 struct si_context *sctx = (struct si_context *)ctx;
1272 struct si_buffer_resources *buffers = &sctx->rw_buffers;
1273 struct si_descriptors *descs = &sctx->descriptors[SI_DESCS_RW_BUFFERS];
1322 if (sctx->b.chip_class >= VI && stride)
1343 radeon_add_to_buffer_list(&sctx->b, &sctx->b.gfx,
1354 sctx->descriptors_dirty |= 1u << SI_DESCS_RW_BUFFERS;
1364 struct si_context *sctx = (struct si_context *)ctx;
1365 struct si_buffer_resources *buffers = &sctx->rw_buffers;
1366 struct si_descriptors *descs = &sctx->descriptors[SI_DESCS_RW_BUFFERS];
1367 unsigned old_num_targets = sctx->b.streamout.num_targets;
1371 if (sctx->b.streamout.num_targets && sctx->b.streamout.begin_emitted) {
1381 for (i = 0; i < sctx->b.streamout.num_targets; i++)
1382 if (sctx->b.streamout.targets[i])
1383 r600_resource(sctx->b.streamout.targets[i]->b.buffer)->TC_L2_dirty = true;
1395 sctx->b.flags |= SI_CONTEXT_INV_SMEM_L1 |
1404 sctx->b.flags |= SI_CONTEXT_PS_PARTIAL_FLUSH |
1442 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
1469 sctx->descriptors_dirty |= 1u << SI_DESCS_RW_BUFFERS;
1493 struct si_context *sctx = (struct si_context *)ctx;
1504 si_set_rw_buffer(sctx, SI_PS_CONST_POLY_STIPPLE, &cb);
1513 void si_update_compressed_colortex_masks(struct si_context *sctx)
1516 si_samplers_update_compressed_colortex_mask(&sctx->samplers[i]);
1517 si_images_update_compressed_colortex_mask(&sctx->images[i]);
1518 si_update_compressed_tex_shader_mask(sctx, i);
1525 static void si_reset_buffer_resources(struct si_context *sctx,
1531 struct si_descriptors *descs = &sctx->descriptors[descriptors_idx];
1537 si_desc_reset_buffer_offset(&sctx->b.b,
1541 sctx->descriptors_dirty |= 1u << descriptors_idx;
1543 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
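
Lines 1525-1543 sketch si_reset_buffer_resources, the rebind helper used when a buffer's storage is invalidated: every enabled slot still pointing at the old buffer gets its descriptor patched to the new virtual address and the buffer re-added to the CS. A reconstruction; the old_va parameter, the enabled_mask walk, and the shader_usage/priority fields are inferred from the parallel rebind paths at lines 1596-1618 and 1640-1688:

static void si_reset_buffer_resources(struct si_context *sctx,
                                      struct si_buffer_resources *buffers,
                                      unsigned descriptors_idx,
                                      struct pipe_resource *buf,
                                      uint64_t old_va)
{
    struct si_descriptors *descs = &sctx->descriptors[descriptors_idx];
    unsigned mask = buffers->enabled_mask;

    while (mask) {
        unsigned i = u_bit_scan(&mask);
        if (buffers->buffers[i] != buf)
            continue;

        si_desc_reset_buffer_offset(&sctx->b.b, descs->list + i * 4,
                                    old_va, buf);
        descs->dirty_mask |= 1ull << i;
        sctx->descriptors_dirty |= 1u << descriptors_idx;

        radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
                                            (struct r600_resource *)buf,
                                            buffers->shader_usage,
                                            buffers->priority, true);
    }
}
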
1560 struct si_context *sctx = (struct si_context*)ctx;
1564 unsigned num_elems = sctx->vertex_elements ?
1565 sctx->vertex_elements->count : 0;
1568 r600_alloc_resource(&sctx->screen->b, rbuffer);
1579 int vb = sctx->vertex_elements->elements[i].vertex_buffer_index;
1581 if (vb >= ARRAY_SIZE(sctx->vertex_buffer))
1583 if (!sctx->vertex_buffer[vb].buffer)
1586 if (sctx->vertex_buffer[vb].buffer == buf) {
1587 sctx->vertex_buffers_dirty = true;
1596 struct si_buffer_resources *buffers = &sctx->rw_buffers;
1598 &sctx->descriptors[SI_DESCS_RW_BUFFERS];
1606 sctx->descriptors_dirty |= 1u << SI_DESCS_RW_BUFFERS;
1608 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
1614 if (sctx->b.streamout.begin_emitted)
1615 r600_emit_streamout_end(&sctx->b);
1616 sctx->b.streamout.append_bitmask =
1617 sctx->b.streamout.enabled_mask;
1618 r600_streamout_buffers_dirty(&sctx->b);
1625 si_reset_buffer_resources(sctx, &sctx->const_buffers[shader],
1632 si_reset_buffer_resources(sctx, &sctx->shader_buffers[shader],
1640 struct si_sampler_views *views = &sctx->samplers[shader].views;
1642 si_sampler_descriptors(sctx, shader);
1653 sctx->descriptors_dirty |=
1656 radeon_add_to_buffer_list_check_mem(&sctx->b, &sctx->b.gfx,
1668 struct si_images_info *images = &sctx->images[shader];
1670 si_image_descriptors(sctx, shader);
1684 sctx->descriptors_dirty |=
1688 &sctx->b, &sctx->b.gfx, rbuffer,
1698 void si_update_all_texture_descriptors(struct si_context *sctx)
1703 struct si_sampler_views *samplers = &sctx->samplers[shader].views;
1704 struct si_images_info *images = &sctx->images[shader];
1717 si_set_shader_image(sctx, shader, i, view, true);
1731 si_set_sampler_view(sctx, shader, i,
1735 si_update_compressed_tex_shader_mask(sctx, shader);
1741 static void si_mark_shader_pointers_dirty(struct si_context *sctx,
1744 sctx->shader_pointers_dirty |=
1749 sctx->vertex_buffer_pointer_dirty = sctx->vertex_buffers.buffer != NULL;
1751 si_mark_atom_dirty(sctx, &sctx->shader_userdata.atom);
1754 static void si_shader_userdata_begin_new_cs(struct si_context *sctx)
1756 sctx->shader_pointers_dirty = u_bit_consecutive(0, SI_NUM_DESCS);
1757 sctx->vertex_buffer_pointer_dirty = sctx->vertex_buffers.buffer != NULL;
1758 si_mark_atom_dirty(sctx, &sctx->shader_userdata.atom);
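
Lines 1741-1758 show the two pointer-dirtying entry points: si_mark_shader_pointers_dirty invalidates one stage's descriptor-pointer SGPRs, while si_shader_userdata_begin_new_cs invalidates all of them after a flush. Reconstructed under the assumption that u_bit_consecutive covers SI_NUM_SHADER_DESCS slots per stage and that the vertex-buffer pointer only matters for the vertex stage:

static void si_mark_shader_pointers_dirty(struct si_context *sctx,
                                          unsigned shader)
{
    sctx->shader_pointers_dirty |=
        u_bit_consecutive(SI_DESCS_FIRST_SHADER +
                          shader * SI_NUM_SHADER_DESCS,
                          SI_NUM_SHADER_DESCS);

    if (shader == PIPE_SHADER_VERTEX)
        sctx->vertex_buffer_pointer_dirty = sctx->vertex_buffers.buffer != NULL;

    si_mark_atom_dirty(sctx, &sctx->shader_userdata.atom);
}

static void si_shader_userdata_begin_new_cs(struct si_context *sctx)
{
    sctx->shader_pointers_dirty = u_bit_consecutive(0, SI_NUM_DESCS);
    sctx->vertex_buffer_pointer_dirty = sctx->vertex_buffers.buffer != NULL;
    si_mark_atom_dirty(sctx, &sctx->shader_userdata.atom);
}
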
1764 static void si_set_user_data_base(struct si_context *sctx,
1767 uint32_t *base = &sctx->shader_userdata.sh_base[shader];
1773 si_mark_shader_pointers_dirty(sctx, shader);
1783 void si_shader_change_notify(struct si_context *sctx)
1786 if (sctx->tes_shader.cso)
1787 si_set_user_data_base(sctx, PIPE_SHADER_VERTEX,
1789 else if (sctx->gs_shader.cso)
1790 si_set_user_data_base(sctx, PIPE_SHADER_VERTEX,
1793 si_set_user_data_base(sctx, PIPE_SHADER_VERTEX,
1797 if (sctx->tes_shader.cso) {
1798 if (sctx->gs_shader.cso)
1799 si_set_user_data_base(sctx, PIPE_SHADER_TESS_EVAL,
1802 si_set_user_data_base(sctx, PIPE_SHADER_TESS_EVAL,
1805 si_set_user_data_base(sctx, PIPE_SHADER_TESS_EVAL, 0);
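
Lines 1783-1805 carry the stage-remapping logic of si_shader_change_notify: depending on whether tessellation and/or geometry are bound, the API vertex shader runs on the hardware LS, ES, or VS stage, and its user-data base register moves accordingly. A reconstruction; the SPI_SHADER_USER_DATA_{LS,ES}_0 registers are assumptions consistent with the VS/HS/GS/PS bases installed at lines 1942-1945:

void si_shader_change_notify(struct si_context *sctx)
{
    /* VS can be bound as VS, ES, or LS. */
    if (sctx->tes_shader.cso)
        si_set_user_data_base(sctx, PIPE_SHADER_VERTEX,
                              R_00B530_SPI_SHADER_USER_DATA_LS_0);
    else if (sctx->gs_shader.cso)
        si_set_user_data_base(sctx, PIPE_SHADER_VERTEX,
                              R_00B330_SPI_SHADER_USER_DATA_ES_0);
    else
        si_set_user_data_base(sctx, PIPE_SHADER_VERTEX,
                              R_00B130_SPI_SHADER_USER_DATA_VS_0);

    /* TES runs as ES (with GS), as VS (without GS), or not at all. */
    if (sctx->tes_shader.cso) {
        if (sctx->gs_shader.cso)
            si_set_user_data_base(sctx, PIPE_SHADER_TESS_EVAL,
                                  R_00B330_SPI_SHADER_USER_DATA_ES_0);
        else
            si_set_user_data_base(sctx, PIPE_SHADER_TESS_EVAL,
                                  R_00B130_SPI_SHADER_USER_DATA_VS_0);
    } else {
        si_set_user_data_base(sctx, PIPE_SHADER_TESS_EVAL, 0);
    }
}
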
1809 static void si_emit_shader_pointer(struct si_context *sctx,
1813 struct radeon_winsys_cs *cs = sctx->b.gfx.cs;
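
si_emit_shader_pointer (line 1809) writes one descriptor array's GPU address into a pair of user SGPRs with a SET_SH_REG packet. A sketch; desc->shader_userdata_offset and the SI_SH_REG_OFFSET bias are assumed from the usual SET_SH_REG encoding (register index in dwords relative to the SH register space):

static void si_emit_shader_pointer(struct si_context *sctx,
                                   struct si_descriptors *desc,
                                   unsigned sh_base)
{
    struct radeon_winsys_cs *cs = sctx->b.gfx.cs;
    uint64_t va = desc->buffer->gpu_address + desc->buffer_offset;

    radeon_emit(cs, PKT3(PKT3_SET_SH_REG, 2, 0));
    radeon_emit(cs, (sh_base + desc->shader_userdata_offset -
                     SI_SH_REG_OFFSET) >> 2);
    radeon_emit(cs, va);
    radeon_emit(cs, va >> 32);
}
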
1827 void si_emit_graphics_shader_userdata(struct si_context *sctx,
1831 uint32_t *sh_base = sctx->shader_userdata.sh_base;
1834 descs = &sctx->descriptors[SI_DESCS_RW_BUFFERS];
1836 if (sctx->shader_pointers_dirty & (1 << SI_DESCS_RW_BUFFERS)) {
1837 si_emit_shader_pointer(sctx, descs,
1839 si_emit_shader_pointer(sctx, descs,
1841 si_emit_shader_pointer(sctx, descs,
1843 si_emit_shader_pointer(sctx, descs,
1845 si_emit_shader_pointer(sctx, descs,
1849 mask = sctx->shader_pointers_dirty &
1859 si_emit_shader_pointer(sctx, descs + i, base);
1861 sctx->shader_pointers_dirty &=
1864 if (sctx->vertex_buffer_pointer_dirty) {
1865 si_emit_shader_pointer(sctx, &sctx->vertex_buffers,
1867 sctx->vertex_buffer_pointer_dirty = false;
1871 void si_emit_compute_shader_userdata(struct si_context *sctx)
1874 struct si_descriptors *descs = sctx->descriptors;
1877 unsigned mask = sctx->shader_pointers_dirty & compute_mask;
1882 si_emit_shader_pointer(sctx, descs + i, base);
1884 sctx->shader_pointers_dirty &= ~compute_mask;
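
Lines 1871-1884 show si_emit_compute_shader_userdata nearly whole: only the compute-stage descriptor slots are scanned, and every pointer is emitted relative to the COMPUTE_USER_DATA_0 base rather than a per-stage sh_base. Reconstructed; the compute_mask derivation is an assumption consistent with the graphics-side mask at line 1849:

void si_emit_compute_shader_userdata(struct si_context *sctx)
{
    unsigned base = R_00B900_COMPUTE_USER_DATA_0;
    struct si_descriptors *descs = sctx->descriptors;
    unsigned compute_mask =
        u_bit_consecutive(SI_DESCS_FIRST_COMPUTE, SI_NUM_SHADER_DESCS);
    unsigned mask = sctx->shader_pointers_dirty & compute_mask;

    while (mask) {
        unsigned i = u_bit_scan(&mask);

        si_emit_shader_pointer(sctx, descs + i, base);
    }
    sctx->shader_pointers_dirty &= ~compute_mask;
}
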
1889 void si_init_all_descriptors(struct si_context *sctx)
1895 si_init_buffer_resources(&sctx->const_buffers[i],
1896 si_const_buffer_descriptors(sctx, i),
1900 si_init_buffer_resources(&sctx->shader_buffers[i],
1901 si_shader_buffer_descriptors(sctx, i),
1906 si_init_descriptors(si_sampler_descriptors(sctx, i),
1910 si_init_descriptors(si_image_descriptors(sctx, i),
1915 si_init_buffer_resources(&sctx->rw_buffers,
1916 &sctx->descriptors[SI_DESCS_RW_BUFFERS],
1920 si_init_descriptors(&sctx->vertex_buffers, SI_SGPR_VERTEX_BUFFERS,
1923 sctx->descriptors_dirty = u_bit_consecutive(0, SI_NUM_DESCS);
1928 sctx->b.b.bind_sampler_states = si_bind_sampler_states;
1929 sctx->b.b.set_shader_images = si_set_shader_images;
1930 sctx->b.b.set_constant_buffer = si_pipe_set_constant_buffer;
1931 sctx->b.b.set_polygon_stipple = si_set_polygon_stipple;
1932 sctx->b.b.set_shader_buffers = si_set_shader_buffers;
1933 sctx->b.b.set_sampler_views = si_set_sampler_views;
1934 sctx->b.b.set_stream_output_targets = si_set_streamout_targets;
1935 sctx->b.invalidate_buffer = si_invalidate_buffer;
1938 si_init_atom(sctx, &sctx->shader_userdata.atom, &sctx->atoms.s.shader_userdata,
1942 si_set_user_data_base(sctx, PIPE_SHADER_VERTEX, R_00B130_SPI_SHADER_USER_DATA_VS_0);
1943 si_set_user_data_base(sctx, PIPE_SHADER_TESS_CTRL, R_00B430_SPI_SHADER_USER_DATA_HS_0);
1944 si_set_user_data_base(sctx, PIPE_SHADER_GEOMETRY, R_00B230_SPI_SHADER_USER_DATA_GS_0);
1945 si_set_user_data_base(sctx, PIPE_SHADER_FRAGMENT, R_00B030_SPI_SHADER_USER_DATA_PS_0);
1948 bool si_upload_graphics_shader_descriptors(struct si_context *sctx)
1951 unsigned dirty = sctx->descriptors_dirty & mask;
1954 sctx->shader_pointers_dirty |= dirty;
1959 if (!si_upload_descriptors(sctx, &sctx->descriptors[i],
1960 &sctx->shader_userdata.atom))
1964 sctx->descriptors_dirty &= ~mask;
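
Lines 1948-1964 give the dirty-mask walk of si_upload_graphics_shader_descriptors nearly complete: any descriptor set that gets re-uploaded also needs its pointer SGPRs re-emitted, hence the |= into shader_pointers_dirty before the loop. Reconstructed, with the mask assumed to span all pre-compute descriptor slots:

bool si_upload_graphics_shader_descriptors(struct si_context *sctx)
{
    const unsigned mask = u_bit_consecutive(0, SI_DESCS_FIRST_COMPUTE);
    unsigned dirty = sctx->descriptors_dirty & mask;

    /* Re-uploaded descriptor buffers get new addresses, so their
     * pointers must be re-emitted even if nothing else changed. */
    sctx->shader_pointers_dirty |= dirty;

    while (dirty) {
        unsigned i = u_bit_scan(&dirty);

        if (!si_upload_descriptors(sctx, &sctx->descriptors[i],
                                   &sctx->shader_userdata.atom))
            return false;
    }

    sctx->descriptors_dirty &= ~mask;
    return true;
}
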
1968 bool si_upload_compute_shader_descriptors(struct si_context *sctx)
1975 unsigned dirty = sctx->descriptors_dirty & mask;
1978 sctx->shader_pointers_dirty |= dirty;
1983 if (!si_upload_descriptors(sctx, &sctx->descriptors[i], NULL))
1987 sctx->descriptors_dirty &= ~mask;
1992 void si_release_all_descriptors(struct si_context *sctx)
1997 si_release_buffer_resources(&sctx->const_buffers[i],
1998 si_const_buffer_descriptors(sctx, i));
1999 si_release_buffer_resources(&sctx->shader_buffers[i],
2000 si_shader_buffer_descriptors(sctx, i));
2001 si_release_sampler_views(&sctx->samplers[i].views);
2002 si_release_image_views(&sctx->images[i]);
2004 si_release_buffer_resources(&sctx->rw_buffers,
2005 &sctx->descriptors[SI_DESCS_RW_BUFFERS]);
2008 si_release_descriptors(&sctx->descriptors[i]);
2009 si_release_descriptors(&sctx->vertex_buffers);
2012 void si_all_descriptors_begin_new_cs(struct si_context *sctx)
2017 si_buffer_resources_begin_new_cs(sctx, &sctx->const_buffers[i]);
2018 si_buffer_resources_begin_new_cs(sctx, &sctx->shader_buffers[i]);
2019 si_sampler_views_begin_new_cs(sctx, &sctx->samplers[i].views);
2020 si_image_views_begin_new_cs(sctx, &sctx->images[i]);
2022 si_buffer_resources_begin_new_cs(sctx, &sctx->rw_buffers);
2023 si_vertex_buffers_begin_new_cs(sctx);
2026 si_descriptors_begin_new_cs(sctx, &sctx->descriptors[i]);
2028 si_shader_userdata_begin_new_cs(sctx);