/*
 * Copyright 2017 Red Hat
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Rob Clark <robclark@freedesktop.org>
 */

#include "nir.h"
#include "nir_builder.h"

#if defined(_WIN32) && !defined(snprintf)
#define snprintf _snprintf
#endif

/*
 * Remap atomic counters to SSBOs.  Atomic counters get remapped to
 * SSBO binding points [0..ssbo_offset) and the original SSBOs are
 * remapped to [ssbo_offset..n) (mostly to align with what mesa/st
 * does).
 */

static bool
lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
{
   nir_intrinsic_op op;
   int idx_src;

   b->cursor = nir_before_instr(&instr->instr);

   switch (instr->intrinsic) {
   case nir_intrinsic_ssbo_atomic_add:
   case nir_intrinsic_ssbo_atomic_imin:
   case nir_intrinsic_ssbo_atomic_umin:
   case nir_intrinsic_ssbo_atomic_imax:
   case nir_intrinsic_ssbo_atomic_umax:
   case nir_intrinsic_ssbo_atomic_and:
   case nir_intrinsic_ssbo_atomic_or:
   case nir_intrinsic_ssbo_atomic_xor:
   case nir_intrinsic_ssbo_atomic_exchange:
   case nir_intrinsic_ssbo_atomic_comp_swap:
   case nir_intrinsic_store_ssbo:
   case nir_intrinsic_load_ssbo:
   case nir_intrinsic_get_buffer_size:
      /* easy case, keep same opcode and just remap SSBO buffer index: */
      op = instr->intrinsic;
      idx_src = (op == nir_intrinsic_store_ssbo) ? 1 : 0;
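      /* For store_ssbo the buffer index is src[1] (src[0] is the value
       * being stored); for the loads/atomics above it is src[0].
       */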
      nir_ssa_def *old_idx = nir_ssa_for_src(b, instr->src[idx_src], 1);
      nir_ssa_def *new_idx = nir_iadd(b, old_idx, nir_imm_int(b, ssbo_offset));
      nir_instr_rewrite_src(&instr->instr,
                            &instr->src[idx_src],
                            nir_src_for_ssa(new_idx));
      return true;
   case nir_intrinsic_atomic_counter_inc:
   case nir_intrinsic_atomic_counter_add:
   case nir_intrinsic_atomic_counter_dec:
      /* inc and dec get remapped to add: */
      op = nir_intrinsic_ssbo_atomic_add;
      break;
   case nir_intrinsic_atomic_counter_read:
      op = nir_intrinsic_load_ssbo;
      break;
   case nir_intrinsic_atomic_counter_min:
      op = nir_intrinsic_ssbo_atomic_umin;
      break;
   case nir_intrinsic_atomic_counter_max:
      op = nir_intrinsic_ssbo_atomic_umax;
      break;
   case nir_intrinsic_atomic_counter_and:
      op = nir_intrinsic_ssbo_atomic_and;
      break;
   case nir_intrinsic_atomic_counter_or:
      op = nir_intrinsic_ssbo_atomic_or;
      break;
   case nir_intrinsic_atomic_counter_xor:
      op = nir_intrinsic_ssbo_atomic_xor;
      break;
   case nir_intrinsic_atomic_counter_exchange:
      op = nir_intrinsic_ssbo_atomic_exchange;
      break;
   case nir_intrinsic_atomic_counter_comp_swap:
      op = nir_intrinsic_ssbo_atomic_comp_swap;
      break;
   default:
      return false;
   }

   nir_ssa_def *buffer = nir_imm_int(b, nir_intrinsic_base(instr));
   nir_ssa_def *temp = NULL;
   nir_intrinsic_instr *new_instr =
      nir_intrinsic_instr_create(ralloc_parent(instr), op);

   /* a couple instructions need special handling since they don't map
    * 1:1 with ssbo atomics
    */
   switch (instr->intrinsic) {
   case nir_intrinsic_atomic_counter_inc:
      /* remapped to ssbo_atomic_add: { buffer_idx, offset, +1 } */
      temp = nir_imm_int(b, +1);
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      new_instr->src[2] = nir_src_for_ssa(temp);
      break;
   case nir_intrinsic_atomic_counter_dec:
      /* remapped to ssbo_atomic_add: { buffer_idx, offset, -1 } */
      /* NOTE semantic difference so we adjust the return value below */
      temp = nir_imm_int(b, -1);
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      new_instr->src[2] = nir_src_for_ssa(temp);
      break;
   case nir_intrinsic_atomic_counter_read:
      /* remapped to load_ssbo: { buffer_idx, offset } */
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      break;
   default:
      /* remapped to ssbo_atomic_x: { buffer_idx, offset, data, (compare)? } */
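      /* the counter intrinsic's offset (src[0]) and data (src[1], plus
       * src[2] for comp_swap) shift down one slot to make room for the
       * buffer index in src[0]:
       */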
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      nir_src_copy(&new_instr->src[2], &instr->src[1], new_instr);
      if (op == nir_intrinsic_ssbo_atomic_comp_swap)
         nir_src_copy(&new_instr->src[3], &instr->src[2], new_instr);
      break;
   }

   nir_ssa_dest_init(&new_instr->instr, &new_instr->dest,
                     instr->dest.ssa.num_components,
                     instr->dest.ssa.bit_size, NULL);
   nir_instr_insert_before(&instr->instr, &new_instr->instr);
   nir_instr_remove(&instr->instr);

   if (instr->intrinsic == nir_intrinsic_atomic_counter_dec) {
      /* atomic_counter_dec returns the post-decrement value, while
       * ssbo_atomic_add returns the pre-op value, so apply the -1
       * again to the result:
       */
      b->cursor = nir_after_instr(&new_instr->instr);
      nir_ssa_def *result = nir_iadd(b, &new_instr->dest.ssa, temp);
      nir_ssa_def_rewrite_uses(&instr->dest.ssa, nir_src_for_ssa(result));
   } else {
      nir_ssa_def_rewrite_uses(&instr->dest.ssa,
                               nir_src_for_ssa(&new_instr->dest.ssa));
   }

   /* we could be replacing an intrinsic with fixed # of dest num_components
    * with one that has variable number.  So best to take this from the dest:
    */
   new_instr->num_components = instr->dest.ssa.num_components;

   return true;
}

static bool
is_atomic_uint(const struct glsl_type *type)
{
   if (glsl_get_base_type(type) == GLSL_TYPE_ARRAY)
      return is_atomic_uint(glsl_get_array_element(type));
   return glsl_get_base_type(type) == GLSL_TYPE_ATOMIC_UINT;
}

bool
nir_lower_atomics_to_ssbo(nir_shader *shader, unsigned ssbo_offset)
{
   bool progress = false;

   nir_foreach_function(function, shader) {
      if (function->impl) {
         nir_builder builder;
         nir_builder_init(&builder, function->impl);
         nir_foreach_block(block, function->impl) {
            nir_foreach_instr_safe(instr, block) {
               if (instr->type == nir_instr_type_intrinsic)
                  progress |= lower_instr(nir_instr_as_intrinsic(instr),
                                          ssbo_offset, &builder);
            }
         }

         nir_metadata_preserve(function->impl, nir_metadata_block_index |
                                               nir_metadata_dominance);
      }
   }

   if (progress) {
      /* replace atomic_uint uniforms with ssbo's: */
      unsigned replaced = 0;
      nir_foreach_variable_safe(var, &shader->uniforms) {
         if (is_atomic_uint(var->type)) {
            exec_node_remove(&var->node);

            if (replaced & (1 << var->data.binding))
               continue;

            nir_variable *ssbo;
            char name[16];

            /* A length of 0 is used to denote unsized arrays */
            const struct glsl_type *type = glsl_array_type(glsl_uint_type(), 0);

            snprintf(name, sizeof(name), "counter%d", var->data.binding);

            ssbo = nir_variable_create(shader, nir_var_shader_storage,
                                       type, name);
            ssbo->data.binding = var->data.binding;

            struct glsl_struct_field field = {
               .type = type,
               .name = "counters",
               .location = -1,
            };

            ssbo->interface_type =
               glsl_interface_type(&field, 1, GLSL_INTERFACE_PACKING_STD430,
                                   false, "counters");

            replaced |= (1 << var->data.binding);
         }
      }
   }

   return progress;
}
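
/*
 * Usage sketch (illustrative; `nir` and `num_abos` are hypothetical
 * caller-side names, not part of this file): a driver that reserves the
 * first num_abos SSBO slots for counters would run
 *
 *    nir_lower_atomics_to_ssbo(nir, num_abos);
 *
 * after which a counter at binding i lives at SSBO binding i, and an
 * original SSBO at binding j is accessed at binding j + num_abos.
 */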