/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mir_to_lir-inl.h"

#include "dex/compiler_ir.h"
#include "dex/mir_graph.h"
#include "invoke_type.h"

namespace art {

/* This file contains target-independent codegen and support. */

/*
 * Load an immediate value into a fixed or temp register. Target
 * register is clobbered, and marked in_use.
 */
LIR* Mir2Lir::LoadConstant(RegStorage r_dest, int value) {
  if (IsTemp(r_dest)) {
    Clobber(r_dest);
    MarkInUse(r_dest);
  }
  return LoadConstantNoClobber(r_dest, value);
}

/*
 * Load a Dalvik register into a physical register. Take care when
 * using this routine, as it doesn't perform any bookkeeping regarding
 * register liveness. That is the responsibility of the caller.
 */
void Mir2Lir::LoadValueDirect(RegLocation rl_src, RegStorage r_dest) {
  rl_src = rl_src.wide ? UpdateLocWide(rl_src) : UpdateLoc(rl_src);
  if (rl_src.location == kLocPhysReg) {
    OpRegCopy(r_dest, rl_src.reg);
  } else if (IsInexpensiveConstant(rl_src)) {
    // On 64-bit targets, the constant will be sign-extended. Make sure a constant reference
    // is always null.
    DCHECK(!rl_src.ref || (mir_graph_->ConstantValue(rl_src) == 0));
    LoadConstantNoClobber(r_dest, mir_graph_->ConstantValue(rl_src));
  } else {
    DCHECK((rl_src.location == kLocDalvikFrame) ||
           (rl_src.location == kLocCompilerTemp));
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    OpSize op_size;
    if (rl_src.ref) {
      op_size = kReference;
    } else if (rl_src.wide) {
      op_size = k64;
    } else {
      op_size = k32;
    }
    LoadBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_src.s_reg_low), r_dest, op_size, kNotVolatile);
  }
}

/*
 * Similar to LoadValueDirect, but clobbers and allocates the target
 * register. Should be used when loading to a fixed register (for example,
 * when loading arguments to an out-of-line call).
 */
void Mir2Lir::LoadValueDirectFixed(RegLocation rl_src, RegStorage r_dest) {
  Clobber(r_dest);
  MarkInUse(r_dest);
  LoadValueDirect(rl_src, r_dest);
}

/*
 * Load a Dalvik register pair into a physical register or register pair.
 * Take care when using this routine, as it doesn't perform any bookkeeping
 * regarding register liveness. That is the responsibility of the caller.
 */
void Mir2Lir::LoadValueDirectWide(RegLocation rl_src, RegStorage r_dest) {
  rl_src = UpdateLocWide(rl_src);
  if (rl_src.location == kLocPhysReg) {
    OpRegCopyWide(r_dest, rl_src.reg);
  } else if (IsInexpensiveConstant(rl_src)) {
    LoadConstantWide(r_dest, mir_graph_->ConstantValueWide(rl_src));
  } else {
    DCHECK((rl_src.location == kLocDalvikFrame) ||
           (rl_src.location == kLocCompilerTemp));
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    LoadBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_src.s_reg_low), r_dest, k64, kNotVolatile);
  }
}

/*
 * Similar to LoadValueDirectWide, but clobbers and allocates the target
 * registers. Should be used when loading to fixed registers (for example,
 * when loading arguments to an out-of-line call).
 */
void Mir2Lir::LoadValueDirectWideFixed(RegLocation rl_src, RegStorage r_dest) {
  Clobber(r_dest);
  MarkInUse(r_dest);
  LoadValueDirectWide(rl_src, r_dest);
}

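/*
 * Load a non-wide Dalvik value into a physical register of the requested
 * register class. If the value is already in a physical register of the
 * wrong class, it is copied into a newly allocated temp of the requested
 * class and the old register is freed. Returns the updated location
 * (kLocPhysReg).
 */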
RegLocation Mir2Lir::LoadValue(RegLocation rl_src, RegisterClass op_kind) {
  DCHECK(!rl_src.ref || op_kind == kRefReg);
  rl_src = UpdateLoc(rl_src);
  if (rl_src.location == kLocPhysReg) {
    if (!RegClassMatches(op_kind, rl_src.reg)) {
      // Wrong register class: realloc, copy and transfer ownership.
      RegStorage new_reg = AllocTypedTemp(rl_src.fp, op_kind);
      OpRegCopy(new_reg, rl_src.reg);
      // Clobber the old reg and free it.
      Clobber(rl_src.reg);
      FreeTemp(rl_src.reg);
      // ...and mark the new one live.
      rl_src.reg = new_reg;
      MarkLive(rl_src);
    }
    return rl_src;
  }

  DCHECK_NE(rl_src.s_reg_low, INVALID_SREG);
  rl_src.reg = AllocTypedTemp(rl_src.fp, op_kind);
  LoadValueDirect(rl_src, rl_src.reg);
  rl_src.location = kLocPhysReg;
  MarkLive(rl_src);
  return rl_src;
}

void Mir2Lir::StoreValue(RegLocation rl_dest, RegLocation rl_src) {
  /*
   * Sanity checking - should never try to store to the same
   * ssa name during the compilation of a single instruction
   * without an intervening ClobberSReg().
   */
  if (kIsDebugBuild) {
    DCHECK((live_sreg_ == INVALID_SREG) ||
           (rl_dest.s_reg_low != live_sreg_));
    live_sreg_ = rl_dest.s_reg_low;
  }
  LIR* def_start;
  LIR* def_end;
  DCHECK(!rl_dest.wide);
  DCHECK(!rl_src.wide);
  rl_src = UpdateLoc(rl_src);
  rl_dest = UpdateLoc(rl_dest);
  if (rl_src.location == kLocPhysReg) {
    if (IsLive(rl_src.reg) ||
        IsPromoted(rl_src.reg) ||
        (rl_dest.location == kLocPhysReg)) {
      // Src is live/promoted or Dest has an assigned reg.
      rl_dest = EvalLoc(rl_dest, rl_dest.ref || rl_src.ref ? kRefReg : kAnyReg, false);
      OpRegCopy(rl_dest.reg, rl_src.reg);
    } else {
      // Just re-assign the registers. Dest gets Src's regs.
      rl_dest.reg = rl_src.reg;
      Clobber(rl_src.reg);
    }
  } else {
    // Load Src either into promoted Dest or temps allocated for Dest.
    rl_dest = EvalLoc(rl_dest, rl_dest.ref ? kRefReg : kAnyReg, false);
    LoadValueDirect(rl_src, rl_dest.reg);
  }

  // Dest is now live and dirty (until/if we flush it to home location).
  MarkLive(rl_dest);
  MarkDirty(rl_dest);

  ResetDefLoc(rl_dest);
  if (IsDirty(rl_dest.reg) && LiveOut(rl_dest.s_reg_low)) {
    def_start = last_lir_insn_;
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    if (rl_dest.ref) {
      StoreRefDisp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg, kNotVolatile);
    } else {
      Store32Disp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg);
    }
    MarkClean(rl_dest);
    def_end = last_lir_insn_;
    if (!rl_dest.ref) {
      // Exclude references from store elimination.
      MarkDef(rl_dest, def_start, def_end);
    }
  }
}

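/*
 * Wide (64-bit) counterpart of LoadValue. Loads a Dalvik register pair into
 * physical registers of the requested class, reallocating and copying if the
 * value is already held in registers of the wrong class.
 */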
RegLocation Mir2Lir::LoadValueWide(RegLocation rl_src, RegisterClass op_kind) {
  DCHECK(rl_src.wide);
  rl_src = UpdateLocWide(rl_src);
  if (rl_src.location == kLocPhysReg) {
    if (!RegClassMatches(op_kind, rl_src.reg)) {
      // Wrong register class: realloc, copy and transfer ownership.
      RegStorage new_regs = AllocTypedTempWide(rl_src.fp, op_kind);
      OpRegCopyWide(new_regs, rl_src.reg);
      // Clobber the old regs and free them.
      Clobber(rl_src.reg);
      FreeTemp(rl_src.reg);
      // ...and mark the new ones live.
      rl_src.reg = new_regs;
      MarkLive(rl_src);
    }
    return rl_src;
  }

  DCHECK_NE(rl_src.s_reg_low, INVALID_SREG);
  DCHECK_NE(GetSRegHi(rl_src.s_reg_low), INVALID_SREG);
  rl_src.reg = AllocTypedTempWide(rl_src.fp, op_kind);
  LoadValueDirectWide(rl_src, rl_src.reg);
  rl_src.location = kLocPhysReg;
  MarkLive(rl_src);
  return rl_src;
}

void Mir2Lir::StoreValueWide(RegLocation rl_dest, RegLocation rl_src) {
  /*
   * Sanity checking - should never try to store to the same
   * ssa name during the compilation of a single instruction
   * without an intervening ClobberSReg().
   */
  if (kIsDebugBuild) {
    DCHECK((live_sreg_ == INVALID_SREG) ||
           (rl_dest.s_reg_low != live_sreg_));
    live_sreg_ = rl_dest.s_reg_low;
  }
  LIR* def_start;
  LIR* def_end;
  DCHECK(rl_dest.wide);
  DCHECK(rl_src.wide);
  rl_src = UpdateLocWide(rl_src);
  rl_dest = UpdateLocWide(rl_dest);
  if (rl_src.location == kLocPhysReg) {
    if (IsLive(rl_src.reg) ||
        IsPromoted(rl_src.reg) ||
        (rl_dest.location == kLocPhysReg)) {
      /*
       * If the src reg[s] are tied to the original Dalvik vreg via liveness or promotion, we
       * can't repurpose them. Similarly, if the dest reg[s] are tied to Dalvik vregs via
       * promotion, we can't just re-assign. In these cases, we have to copy.
       */
      rl_dest = EvalLoc(rl_dest, kAnyReg, false);
      OpRegCopyWide(rl_dest.reg, rl_src.reg);
    } else {
      // Just re-assign the registers. Dest gets Src's regs.
      rl_dest.reg = rl_src.reg;
      Clobber(rl_src.reg);
    }
  } else {
    // Load Src either into promoted Dest or temps allocated for Dest.
    rl_dest = EvalLoc(rl_dest, kAnyReg, false);
    LoadValueDirectWide(rl_src, rl_dest.reg);
  }

  // Dest is now live and dirty (until/if we flush it to home location).
  MarkLive(rl_dest);
  MarkWide(rl_dest.reg);
  MarkDirty(rl_dest);

  ResetDefLocWide(rl_dest);
  if (IsDirty(rl_dest.reg) && (LiveOut(rl_dest.s_reg_low) ||
      LiveOut(GetSRegHi(rl_dest.s_reg_low)))) {
    def_start = last_lir_insn_;
    DCHECK_EQ((mir_graph_->SRegToVReg(rl_dest.s_reg_low) + 1),
              mir_graph_->SRegToVReg(GetSRegHi(rl_dest.s_reg_low)));
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg, k64, kNotVolatile);
    MarkClean(rl_dest);
    def_end = last_lir_insn_;
    MarkDefWide(rl_dest, def_start, def_end);
  }
}

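/*
 * Store a result that is already held in a physical register (rl_src) to the
 * location of rl_dest. If the dest has no assigned register, the src register
 * is simply re-assigned to it; otherwise the value is copied. The dest is then
 * flushed to its home location if it is dirty and live out.
 */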
void Mir2Lir::StoreFinalValue(RegLocation rl_dest, RegLocation rl_src) {
  DCHECK_EQ(rl_src.location, kLocPhysReg);

  if (rl_dest.location == kLocPhysReg) {
    OpRegCopy(rl_dest.reg, rl_src.reg);
  } else {
    // Just re-assign the register. Dest gets Src's reg.
    rl_dest.location = kLocPhysReg;
    rl_dest.reg = rl_src.reg;
    Clobber(rl_src.reg);
  }

  // Dest is now live and dirty (until/if we flush it to home location).
  MarkLive(rl_dest);
  MarkDirty(rl_dest);

  ResetDefLoc(rl_dest);
  if (IsDirty(rl_dest.reg) && LiveOut(rl_dest.s_reg_low)) {
    LIR* def_start = last_lir_insn_;
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    Store32Disp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg);
    MarkClean(rl_dest);
    LIR* def_end = last_lir_insn_;
    if (!rl_dest.ref) {
      // Exclude references from store elimination.
      MarkDef(rl_dest, def_start, def_end);
    }
  }
}

void Mir2Lir::StoreFinalValueWide(RegLocation rl_dest, RegLocation rl_src) {
  DCHECK(rl_dest.wide);
  DCHECK(rl_src.wide);
  DCHECK_EQ(rl_src.location, kLocPhysReg);

  if (rl_dest.location == kLocPhysReg) {
    OpRegCopyWide(rl_dest.reg, rl_src.reg);
  } else {
    // Just re-assign the registers. Dest gets Src's regs.
    rl_dest.location = kLocPhysReg;
    rl_dest.reg = rl_src.reg;
    Clobber(rl_src.reg);
  }

  // Dest is now live and dirty (until/if we flush it to home location).
  MarkLive(rl_dest);
  MarkWide(rl_dest.reg);
  MarkDirty(rl_dest);

  ResetDefLocWide(rl_dest);
  if (IsDirty(rl_dest.reg) && (LiveOut(rl_dest.s_reg_low) ||
      LiveOut(GetSRegHi(rl_dest.s_reg_low)))) {
    LIR* def_start = last_lir_insn_;
    DCHECK_EQ((mir_graph_->SRegToVReg(rl_dest.s_reg_low) + 1),
              mir_graph_->SRegToVReg(GetSRegHi(rl_dest.s_reg_low)));
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg, k64, kNotVolatile);
    MarkClean(rl_dest);
    LIR* def_end = last_lir_insn_;
    MarkDefWide(rl_dest, def_start, def_end);
  }
}

/* Utilities to load the current Method* */
void Mir2Lir::LoadCurrMethodDirect(RegStorage r_tgt) {
  if (GetCompilationUnit()->target64) {
    LoadValueDirectWideFixed(mir_graph_->GetMethodLoc(), r_tgt);
  } else {
    LoadValueDirectFixed(mir_graph_->GetMethodLoc(), r_tgt);
  }
}

RegStorage Mir2Lir::LoadCurrMethodWithHint(RegStorage r_hint) {
  // If the method is promoted to a register, return that register; otherwise load it to r_hint.
  // (Replacement for LoadCurrMethod(), usually used when LockCallTemps() is in effect.)
  DCHECK(r_hint.Valid());
  RegLocation rl_method = mir_graph_->GetMethodLoc();
  if (rl_method.location == kLocPhysReg) {
    DCHECK(!IsTemp(rl_method.reg));
    return rl_method.reg;
  } else {
    LoadCurrMethodDirect(r_hint);
    return r_hint;
  }
}

RegLocation Mir2Lir::LoadCurrMethod() {
  return GetCompilationUnit()->target64 ?
      LoadValueWide(mir_graph_->GetMethodLoc(), kCoreReg) :
      LoadValue(mir_graph_->GetMethodLoc(), kRefReg);
}

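/*
 * Ensure that the value in loc lives in a temp register that can be freely
 * clobbered. If the current register is not a temp, the value is copied into
 * a newly allocated temp. The returned location no longer tracks the original
 * s_reg.
 */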
RegLocation Mir2Lir::ForceTemp(RegLocation loc) {
  DCHECK(!loc.wide);
  DCHECK(loc.location == kLocPhysReg);
  DCHECK(!loc.reg.IsFloat());
  if (IsTemp(loc.reg)) {
    Clobber(loc.reg);
  } else {
    RegStorage temp_low = AllocTemp();
    OpRegCopy(temp_low, loc.reg);
    loc.reg = temp_low;
  }

  // Ensure that this doesn't represent the original SR any more.
  loc.s_reg_low = INVALID_SREG;
  return loc;
}

RegLocation Mir2Lir::ForceTempWide(RegLocation loc) {
  DCHECK(loc.wide);
  DCHECK(loc.location == kLocPhysReg);
  DCHECK(!loc.reg.IsFloat());

  if (!loc.reg.IsPair()) {
    if (IsTemp(loc.reg)) {
      Clobber(loc.reg);
    } else {
      RegStorage temp = AllocTempWide();
      OpRegCopy(temp, loc.reg);
      loc.reg = temp;
    }
  } else {
    if (IsTemp(loc.reg.GetLow())) {
      Clobber(loc.reg.GetLow());
    } else {
      RegStorage temp_low = AllocTemp();
      OpRegCopy(temp_low, loc.reg.GetLow());
      loc.reg.SetLowReg(temp_low.GetReg());
    }
    if (IsTemp(loc.reg.GetHigh())) {
      Clobber(loc.reg.GetHigh());
    } else {
      RegStorage temp_high = AllocTemp();
      OpRegCopy(temp_high, loc.reg.GetHigh());
      loc.reg.SetHighReg(temp_high.GetReg());
    }
  }

  // Ensure that this doesn't represent the original SR any more.
  loc.s_reg_low = INVALID_SREG;
  return loc;
}

}  // namespace art