/*
 * Copyright 2011 Christoph Bumiller
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
 * OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
21 */ 22 23 #ifndef __NV50_IR_INLINES_H__ 24 #define __NV50_IR_INLINES_H__ 25 26 static inline CondCode reverseCondCode(CondCode cc) 27 { 28 static const uint8_t ccRev[8] = { 0, 4, 2, 6, 1, 5, 3, 7 }; 29 30 return static_cast<CondCode>(ccRev[cc & 7] | (cc & ~7)); 31 } 32 33 static inline CondCode inverseCondCode(CondCode cc) 34 { 35 return static_cast<CondCode>(cc ^ 7); 36 } 37 38 static inline bool isMemoryFile(DataFile f) 39 { 40 return (f >= FILE_MEMORY_CONST && f <= FILE_MEMORY_LOCAL); 41 } 42 43 // contrary to asTex(), this will never include SULD/SUST 44 static inline bool isTextureOp(operation op) 45 { 46 return (op >= OP_TEX && op <= OP_TEXCSAA); 47 } 48 49 static inline unsigned int typeSizeof(DataType ty) 50 { 51 switch (ty) { 52 case TYPE_U8: 53 case TYPE_S8: 54 return 1; 55 case TYPE_F16: 56 case TYPE_U16: 57 case TYPE_S16: 58 return 2; 59 case TYPE_F32: 60 case TYPE_U32: 61 case TYPE_S32: 62 return 4; 63 case TYPE_F64: 64 case TYPE_U64: 65 case TYPE_S64: 66 return 8; 67 case TYPE_B96: 68 return 12; 69 case TYPE_B128: 70 return 16; 71 default: 72 return 0; 73 } 74 } 75 76 static inline DataType typeOfSize(unsigned int size, 77 bool flt = false, bool sgn = false) 78 { 79 switch (size) { 80 case 1: return sgn ? TYPE_S8 : TYPE_U8; 81 case 2: return flt ? TYPE_F16 : (sgn ? TYPE_S16 : TYPE_U16); 82 case 8: return flt ? TYPE_F64 : (sgn ? TYPE_S64 : TYPE_U64); 83 case 12: return TYPE_B96; 84 case 16: return TYPE_B128; 85 case 4: 86 return flt ? TYPE_F32 : (sgn ? 
TYPE_S32 : TYPE_U32); 87 default: 88 return TYPE_NONE; 89 } 90 } 91 92 static inline bool isFloatType(DataType ty) 93 { 94 return (ty >= TYPE_F16 && ty <= TYPE_F64); 95 } 96 97 static inline bool isSignedIntType(DataType ty) 98 { 99 return (ty == TYPE_S8 || ty == TYPE_S16 || ty == TYPE_S32); 100 } 101 102 static inline bool isSignedType(DataType ty) 103 { 104 switch (ty) { 105 case TYPE_NONE: 106 case TYPE_U8: 107 case TYPE_U16: 108 case TYPE_U32: 109 case TYPE_B96: 110 case TYPE_B128: 111 return false; 112 default: 113 return true; 114 } 115 } 116 117 static inline DataType intTypeToSigned(DataType ty) 118 { 119 switch (ty) { 120 case TYPE_U32: return TYPE_S32; 121 case TYPE_U16: return TYPE_S16; 122 case TYPE_U8: return TYPE_S8; 123 default: 124 return ty; 125 } 126 } 127 128 const ValueRef *ValueRef::getIndirect(int dim) const 129 { 130 return isIndirect(dim) ? &insn->src(indirect[dim]) : NULL; 131 } 132 133 DataFile ValueRef::getFile() const 134 { 135 return value ? value->reg.file : FILE_NULL; 136 } 137 138 unsigned int ValueRef::getSize() const 139 { 140 return value ? value->reg.size : 0; 141 } 142 143 Value *ValueRef::rep() const 144 { 145 assert(value); 146 return value->join; 147 } 148 149 Value *ValueDef::rep() const 150 { 151 assert(value); 152 return value->join; 153 } 154 155 DataFile ValueDef::getFile() const 156 { 157 return value ? value->reg.file : FILE_NULL; 158 } 159 160 unsigned int ValueDef::getSize() const 161 { 162 return value ? value->reg.size : 0; 163 } 164 165 void ValueDef::setSSA(LValue *lval) 166 { 167 origin = value->asLValue(); 168 set(lval); 169 } 170 171 const LValue *ValueDef::preSSA() const 172 { 173 return origin; 174 } 175 176 Instruction *Value::getInsn() const 177 { 178 return defs.empty() ? 
NULL : defs.front()->getInsn(); 179 } 180 181 Instruction *Value::getUniqueInsn() const 182 { 183 if (defs.empty()) 184 return NULL; 185 186 // after regalloc, the definitions of coalesced values are linked 187 if (join != this) { 188 for (DefCIterator it = defs.begin(); it != defs.end(); ++it) 189 if ((*it)->get() == this) 190 return (*it)->getInsn(); 191 // should be unreachable and trigger assertion at the end 192 } 193 #ifdef DEBUG 194 if (reg.data.id < 0) { 195 int n = 0; 196 for (DefCIterator it = defs.begin(); n < 2 && it != defs.end(); ++it) 197 if ((*it)->get() == this) // don't count joined values 198 ++n; 199 if (n > 1) 200 WARN("value %%%i not uniquely defined\n", id); // return NULL ? 201 } 202 #endif 203 assert(defs.front()->get() == this); 204 return defs.front()->getInsn(); 205 } 206 207 inline bool Instruction::constrainedDefs() const 208 { 209 return defExists(1) || op == OP_UNION; 210 } 211 212 Value *Instruction::getIndirect(int s, int dim) const 213 { 214 return srcs[s].isIndirect(dim) ? getSrc(srcs[s].indirect[dim]) : NULL; 215 } 216 217 Value *Instruction::getPredicate() const 218 { 219 return (predSrc >= 0) ? getSrc(predSrc) : NULL; 220 } 221 222 void Instruction::setFlagsDef(int d, Value *val) 223 { 224 if (val) { 225 if (flagsDef < 0) 226 flagsDef = d; 227 setDef(flagsDef, val); 228 } else { 229 if (flagsDef >= 0) { 230 setDef(flagsDef, NULL); 231 flagsDef = -1; 232 } 233 } 234 } 235 236 void Instruction::setFlagsSrc(int s, Value *val) 237 { 238 flagsSrc = s; 239 setSrc(flagsSrc, val); 240 } 241 242 Value *TexInstruction::getIndirectR() const 243 { 244 return tex.rIndirectSrc >= 0 ? getSrc(tex.rIndirectSrc) : NULL; 245 } 246 247 Value *TexInstruction::getIndirectS() const 248 { 249 return tex.rIndirectSrc >= 0 ? 
getSrc(tex.rIndirectSrc) : NULL; 250 } 251 252 CmpInstruction *Instruction::asCmp() 253 { 254 if (op >= OP_SET_AND && op <= OP_SLCT && op != OP_SELP) 255 return static_cast<CmpInstruction *>(this); 256 return NULL; 257 } 258 259 const CmpInstruction *Instruction::asCmp() const 260 { 261 if (op >= OP_SET_AND && op <= OP_SLCT && op != OP_SELP) 262 return static_cast<const CmpInstruction *>(this); 263 return NULL; 264 } 265 266 FlowInstruction *Instruction::asFlow() 267 { 268 if (op >= OP_BRA && op <= OP_JOIN) 269 return static_cast<FlowInstruction *>(this); 270 return NULL; 271 } 272 273 const FlowInstruction *Instruction::asFlow() const 274 { 275 if (op >= OP_BRA && op <= OP_JOINAT) 276 return static_cast<const FlowInstruction *>(this); 277 return NULL; 278 } 279 280 TexInstruction *Instruction::asTex() 281 { 282 if (op >= OP_TEX && op <= OP_TEXCSAA) 283 return static_cast<TexInstruction *>(this); 284 return NULL; 285 } 286 287 const TexInstruction *Instruction::asTex() const 288 { 289 if (op >= OP_TEX && op <= OP_TEXCSAA) 290 return static_cast<const TexInstruction *>(this); 291 return NULL; 292 } 293 294 static inline Instruction *cloneForward(Function *ctx, Instruction *obj) 295 { 296 DeepClonePolicy<Function> pol(ctx); 297 298 for (int i = 0; obj->srcExists(i); ++i) 299 pol.set(obj->getSrc(i), obj->getSrc(i)); 300 301 return obj->clone(pol); 302 } 303 304 // XXX: use a virtual function so we're really really safe ? 
305 LValue *Value::asLValue() 306 { 307 if (reg.file >= FILE_GPR && reg.file <= FILE_ADDRESS) 308 return static_cast<LValue *>(this); 309 return NULL; 310 } 311 312 Symbol *Value::asSym() 313 { 314 if (reg.file >= FILE_MEMORY_CONST) 315 return static_cast<Symbol *>(this); 316 return NULL; 317 } 318 319 const Symbol *Value::asSym() const 320 { 321 if (reg.file >= FILE_MEMORY_CONST) 322 return static_cast<const Symbol *>(this); 323 return NULL; 324 } 325 326 void Symbol::setOffset(int32_t offset) 327 { 328 reg.data.offset = offset; 329 } 330 331 void Symbol::setAddress(Symbol *base, int32_t offset) 332 { 333 baseSym = base; 334 reg.data.offset = offset; 335 } 336 337 void Symbol::setSV(SVSemantic sv, uint32_t index) 338 { 339 reg.data.sv.sv = sv; 340 reg.data.sv.index = index; 341 } 342 343 ImmediateValue *Value::asImm() 344 { 345 if (reg.file == FILE_IMMEDIATE) 346 return static_cast<ImmediateValue *>(this); 347 return NULL; 348 } 349 350 const ImmediateValue *Value::asImm() const 351 { 352 if (reg.file == FILE_IMMEDIATE) 353 return static_cast<const ImmediateValue *>(this); 354 return NULL; 355 } 356 357 Value *Value::get(Iterator &it) 358 { 359 return reinterpret_cast<Value *>(it.get()); 360 } 361 362 bool BasicBlock::reachableBy(const BasicBlock *by, const BasicBlock *term) 363 { 364 return cfg.reachableBy(&by->cfg, &term->cfg); 365 } 366 367 BasicBlock *BasicBlock::get(Iterator &iter) 368 { 369 return reinterpret_cast<BasicBlock *>(iter.get()); 370 } 371 372 BasicBlock *BasicBlock::get(Graph::Node *node) 373 { 374 assert(node); 375 return reinterpret_cast<BasicBlock *>(node->data); 376 } 377 378 Function *Function::get(Graph::Node *node) 379 { 380 assert(node); 381 return reinterpret_cast<Function *>(node->data); 382 } 383 384 LValue *Function::getLValue(int id) 385 { 386 assert((unsigned int)id < (unsigned int)allLValues.getSize()); 387 return reinterpret_cast<LValue *>(allLValues.get(id)); 388 } 389 390 #endif // __NV50_IR_INLINES_H__ 391