
Lines Matching defs:out

476     Location out = locations->Out();
491 // In the unlucky case that the `temp` is R0, we preserve the address in `out` across
494 entry_address = temp_is_r0 ? RegisterFrom(out) : temp;
528 if (out.IsValid()) {
529 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
530 arm_codegen->Move32(locations->Out(), LocationFrom(r0));
560 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
563 vixl32::Register out = OutputRegister(load);
571 // In the unlucky case that the `temp` is R0, we preserve the address in `out` across
577 entry_address = temp_is_r0 ? out : temp;
603 arm_codegen->Move32(locations->Out(), LocationFrom(r0));
623 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
648 arm_codegen->Move32(locations->Out(), LocationFrom(r0));
974 // rb_state. We do that by shifting the bit out of the lock word with LSRS
979 __ B(cc, GetExitLabel()); // Carry flag is the last bit shifted out by LSRS.
1116 // rb_state. We do that by shifting the bit out of the lock word with LSRS
1121 __ B(cc, GetExitLabel()); // Carry flag is the last bit shifted out by LSRS.
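A note on the LSRS trick above (it appears in both slow paths): LSRS #n sets the carry flag to bit n - 1 of its input, so shifting the lock word right by one more than the read-barrier state bit's position leaves that bit in C for the following B(cc, ...). A minimal emulation of the flag behavior (helper name hypothetical):

#include <cstdint>

// Carry left by "LSRS out, lock_word, #shift", valid for 1 <= shift <= 32:
// C becomes bit (shift - 1) of the input.
bool CarryAfterLsrs(uint32_t lock_word, unsigned shift) {
  return (lock_word >> (shift - 1u)) & 1u;
}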
1238 Location out,
1244 out_(out),
1250 // If `obj` is equal to `out` or `ref`, it means the initial object
1254 // __ LoadFromOffset(kLoadWord, out, out, offset);
1259 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
1424 ReadBarrierForRootSlowPathARMVIXL(HInstruction* instruction, Location out, Location root)
1425 : SlowPathCodeARMVIXL(instruction), out_(out), root_(root) {
1575 vixl32::Register out,
1584 __ Mov(out, in);
1588 __ Add(out, first, second);
1591 __ And(out, first, second);
1594 __ Orr(out, first, second);
1597 __ Sub(out, first, second);
1600 __ Eor(out, first, second);
1610 const Location& out,
1617 const vixl32::Register out_hi = HighRegisterFrom(out);
1618 const vixl32::Register out_lo = LowRegisterFrom(out);
1646 const Location out = locations->Out();
1649 const vixl32::Register out_hi = HighRegisterFrom(out);
1650 const vixl32::Register out_lo = LowRegisterFrom(out);
1665 out,
1673 out,
1710 out,
1743 out,
2065 const vixl32::Register out = OutputRegister(cond);
2068 __ Mov(LeaveFlags, out, 0);
2070 if (out.IsLow()) {
2077 __ mov(condition.first, out, 1);
2083 __ Mov(out, 1);
2096 const vixl32::Register out = OutputRegister(cond);
2113 if (out.Is(left_high)) {
2118 __ Sub(out, left_low, right_low);
2125 __ Sub(out, left_low, LowRegisterFrom(right));
2131 if (condition == kCondNE && out.IsLow()) {
2132 __ Orrs(out, out, temp);
2140 __ mov(ne, out, 1);
2142 __ Orr(out, out, temp);
2143 codegen->GenerateConditionWithZero(condition, out, out, temp);
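The kCondNE sequence above tests 64-bit (in)equality without a branch: subtract the halves and OR the differences, so Z ends up set exactly when both halves match. As a sketch with 32-bit halves:

#include <cstdint>

// "Sub / Sub / Orrs": the OR of the half-differences is zero iff
// left == right, so ORRS leaves Z set exactly on equality.
bool NotEqualLong(uint32_t left_lo, uint32_t left_hi,
                  uint32_t right_lo, uint32_t right_hi) {
  return (((left_lo - right_lo) | (left_hi - right_hi)) != 0u);
}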
2237 const vixl32::Register out = OutputRegister(cond);
2249 if (out.IsLow()) {
2251 __ Orrs(out, LowRegisterFrom(left), HighRegisterFrom(left));
2259 __ mov(ne, out, 1);
2267 __ Orr(out, LowRegisterFrom(left), HighRegisterFrom(left));
2268 codegen->GenerateConditionWithZero(condition, out, out);
2276 codegen->GenerateConditionWithZero(condition, out, HighRegisterFrom(left));
2287 // If `out` is a low register, then the GenerateConditionGeneric()
2289 (!out.IsLow() || !CanGenerateTest(cond, codegen->GetAssembler()))) {
2308 __ Mov(out, 0);
2313 __ Mov(out, 1);
2333 const vixl32::Register out = OutputRegister(cond);
2347 if (out.IsLow() && out.Is(in)) {
2348 __ Cmp(out, 0);
2356 __ mov(ne, out, 1);
2367 codegen->GenerateConditionWithZero(condition, out, in);
2382 } else if (out.Is(RegisterFrom(right))) {
2390 if (condition == kCondNE && out.IsLow()) {
2391 __ Subs(out, in, operand);
2399 __ mov(ne, out, 1);
2401 __ Sub(out, in, operand);
2402 codegen->GenerateConditionWithZero(condition, out, out);
2438 static bool CanGenerateConditionalMove(const Location& out, const Location& src) {
2442 if (!out.IsRegister() && !out.IsRegisterPair()) {
2454 if (out.IsRegister()) {
2455 if (!RegisterFrom(out).IsLow()) {
2459 DCHECK(out.IsRegisterPair());
2461 if (!HighRegisterFrom(out).IsLow()) {
3203 const Location out = locations->Out();
3214 codegen_->MoveLocation(out, src, type);
3223 if (out.Equals(second)) {
3226 } else if (out.Equals(first)) {
3239 if (CanGenerateConditionalMove(out, src)) {
3240 if (!out.Equals(first) && !out.Equals(second)) {
3241 codegen_->MoveLocation(out, src.Equals(first) ? second : first, type);
3253 const size_t instr_count = out.IsRegisterPair() ? 4 : 2;
3259 if (out.IsRegister()) {
3261 __ mov(cond.first, RegisterFrom(out), OperandFrom(src, type));
3263 DCHECK(out.IsRegisterPair());
3280 __ mov(cond.first, LowRegisterFrom(out), operand_low);
3282 __ mov(cond.first, HighRegisterFrom(out), operand_high);
3294 if (out.Equals(second)) {
3301 if (!out.Equals(first)) {
3302 codegen_->MoveLocation(out, first, type);
3307 codegen_->MoveLocation(out, src, type);
3330 vixl32::Register out,
3337 if (!temp.IsValid() || (out.IsLow() && !out.Is(in))) {
3338 temp = out;
3347 if (out.Is(in)) {
3351 // out = - in + in + carry = carry
3352 __ Adc(out, temp, in);
3355 __ Clz(out, in);
3358 __ Lsr(out, out, 5);
3367 if (out.Is(in)) {
3372 temp = out;
3377 // out = in + ~temp + carry = in + (-(in - 1) - 1) + carry = in - in + 1 - 1 + carry = carry
3378 __ Sbc(out, in, temp);
3382 __ Mvn(out, in);
3383 in = out;
3387 __ Lsr(out, in, 31);
3391 __ Mov(out, 1);
3395 __ Mov(out, 0);
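The flag tricks in GenerateConditionWithZero above rest on three identities spelled out in the comments: Rsbs temp, in, #0 sets C exactly when in == 0, Subs temp, in, #1 sets C exactly when in != 0, and CLZ(0) == 32, so a right shift by 5 turns "is zero" into a 0/1 value. A hedged C++ emulation (__builtin_clz is the GCC/Clang stand-in for CLZ):

#include <cstdint>

// out = (in == 0): ADC computes -in + in + C = C, with C from RSBS.
uint32_t IsZeroAdc(uint32_t in) {
  uint32_t carry = (in == 0u);        // C after "Rsbs temp, in, #0"
  return (0u - in) + in + carry;      // "Adc out, temp, in"
}

// out = (in == 0): CLZ of zero is 32, and 32 >> 5 == 1; any nonzero
// input has CLZ < 32, which shifts down to 0.
uint32_t IsZeroClz(uint32_t in) {
  unsigned clz = (in == 0u) ? 32u : (unsigned)__builtin_clz(in);  // "Clz out, in"
  return clz >> 5;                                                // "Lsr out, out, #5"
}

// out = (in != 0): SBC computes in - (in - 1) - 1 + C = C, with C from SUBS.
uint32_t IsNotZeroSbc(uint32_t in) {
  uint32_t carry = (in != 0u);        // C after "Subs temp, in, #1"
  return in - (in - 1u) - 1u + carry; // "Sbc out, in, temp"
}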
3457 const vixl32::Register out = OutputRegister(cond);
3466 if (out.Is(right)) {
3470 __ Eor(out, left, right);
3473 __ Eor(out, out, 1);
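For operands already normalized to 0 or 1, the two EORs above compute equality directly: left ^ right is 1 exactly when they differ, and the final ^ 1 flips that into an equality result. Sketch:

#include <cstdint>

// Valid only for 0/1 inputs.
uint32_t BoolEqual(uint32_t left, uint32_t right) {
  uint32_t out = left ^ right;  // "Eor out, left, right"
  return out ^ 1u;              // "Eor out, out, #1"
}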
3831 Location out = locations->Out();
3839 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
3840 __ Rsbs(LowRegisterFrom(out), LowRegisterFrom(in), 0);
3846 // out.hi = -C
3847 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(out));
3848 // out.hi = out.hi - in.hi
3849 __ Sub(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(in));
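Thumb-2 has no RSC (reverse subtract with carry), so the long negation above rebuilds the high word by hand: RSBS negates the low word and leaves C set only when no borrow occurred (in.lo == 0), SBC of a register with itself then yields C - 1 (0 or -1), and the trailing Sub folds in in.hi. A portable check of the identity:

#include <cstdint>

uint64_t NegLong(uint32_t in_lo, uint32_t in_hi) {
  uint32_t out_lo = 0u - in_lo;        // "Rsbs out.lo, in.lo, #0"
  uint32_t carry = (in_lo == 0u);      // C left behind by RSBS
  uint32_t out_hi = carry - 1u;        // "Sbc out.hi, out.hi, out.hi" -> 0 or -1
  out_hi -= in_hi;                     // "Sub out.hi, out.hi, in.hi"
  return ((uint64_t)out_hi << 32) | out_lo;
}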
4081 Location out = locations->Out();
4133 DCHECK(out.IsRegister());
4180 DCHECK(out.IsRegisterPair());
4182 __ Mov(LowRegisterFrom(out), InputRegisterAt(conversion, 0));
4184 __ Asr(HighRegisterFrom(out), LowRegisterFrom(out), 31);
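The Mov/Asr pair above is plain sign extension: the high word of an int-to-long conversion is the sign bit replicated 32 times. Sketched (assuming arithmetic right shift on signed values):

#include <cstdint>

int64_t IntToLong(int32_t in) {
  uint32_t out_lo = (uint32_t)in;          // "Mov out.lo, in"
  uint32_t out_hi = (uint32_t)(in >> 31);  // "Asr out.hi, out.lo, #31"
  return (int64_t)(((uint64_t)out_hi << 32) | out_lo);
}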
4266 __ Vmov(LowSRegisterFrom(out), InputRegisterAt(conversion, 0));
4267 __ Vcvt(F64, S32, DRegisterFrom(out), LowSRegisterFrom(out));
4275 vixl32::SRegister out_s = LowSRegisterFrom(out);
4276 vixl32::DRegister out_d = DRegisterFrom(out);
4296 __ Vcvt(F64, F32, DRegisterFrom(out), InputSRegisterAt(conversion, 0));
4344 Location out = locations->Out();
4357 GenerateAddLongConst(out, first, value);
4360 __ Adds(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
4361 __ Adc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
4407 Location out = locations->Out();
4419 GenerateAddLongConst(out, first, -value);
4422 __ Subs(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
4423 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
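Long add and long sub above share one shape: the flag-setting half (Adds/Subs) produces the carry or borrow that the second instruction (Adc/Sbc) consumes. With the carries made explicit:

#include <cstdint>

// "Adds lo / Adc hi": carry is the unsigned overflow of the low add.
uint64_t AddLong(uint32_t a_lo, uint32_t a_hi, uint32_t b_lo, uint32_t b_hi) {
  uint32_t lo = a_lo + b_lo;
  uint32_t carry = lo < a_lo;
  uint32_t hi = a_hi + b_hi + carry;
  return ((uint64_t)hi << 32) | lo;
}

// "Subs lo / Sbc hi": borrow is set when the low subtraction wraps.
uint64_t SubLong(uint32_t a_lo, uint32_t a_hi, uint32_t b_lo, uint32_t b_hi) {
  uint32_t lo = a_lo - b_lo;
  uint32_t borrow = a_lo < b_lo;
  uint32_t hi = a_hi - b_hi - borrow;
  return ((uint64_t)hi << 32) | lo;
}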
4465 Location out = locations->Out();
4474 vixl32::Register out_hi = HighRegisterFrom(out);
4475 vixl32::Register out_lo = LowRegisterFrom(out);
4482 // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
4483 // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
4488 // output: out
4489 // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
4490 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
4491 // parts: out.lo = (in1.lo * in2.lo)[31:0]
4497 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
4499 // out.lo <- (in1.lo * in2.lo)[31:0];
4501 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
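The formula in the comments above is schoolbook 64-bit multiplication with the in1.hi * in2.hi term dropped, since it only contributes at bit 64 and above. A portable sketch (UMULL is the 32x32->64 product of the low words):

#include <cstdint>

// out.hi:out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32
//               + in1.lo * in2.lo                       (mod 2^64)
uint64_t MulLong(uint32_t in1_lo, uint32_t in1_hi,
                 uint32_t in2_lo, uint32_t in2_hi) {
  uint32_t cross = in1_lo * in2_hi + in1_hi * in2_lo;  // MUL + MLA
  uint64_t low = (uint64_t)in1_lo * in2_lo;            // UMULL
  uint32_t out_hi = cross + (uint32_t)(low >> 32);     // add the high half
  uint32_t out_lo = (uint32_t)low;
  return ((uint64_t)out_hi << 32) | out_lo;
}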
4523 vixl32::Register out = OutputRegister(instruction);
4529 __ Mov(out, 0);
4532 __ Mov(out, dividend);
4534 __ Rsb(out, dividend, 0);
4547 vixl32::Register out = OutputRegister(instruction);
4560 __ Add(out, temp, dividend);
4563 __ Asr(out, out, ctz_imm);
4565 __ Rsb(out, out, 0);
4568 __ Ubfx(out, out, 0, ctz_imm);
4569 __ Sub(out, out, temp);
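The power-of-two path above uses the standard bias trick for truncating signed division: add 2^k - 1 to negative dividends before shifting, and for the remainder mask off the low bits (the Ubfx) and remove the bias again; a negative divisor only adds the final Rsb negation. A sketch for 1 <= k <= 31, assuming arithmetic right shift:

#include <cstdint>

int32_t DivPow2(int32_t dividend, unsigned k) {
  // 0 for non-negative dividends, 2^k - 1 for negative ones.
  uint32_t bias = (uint32_t)(dividend >> 31) >> (32u - k);
  return (int32_t)((uint32_t)dividend + bias) >> k;   // "Add" then "Asr"
}

int32_t RemPow2(int32_t dividend, unsigned k) {
  uint32_t bias = (uint32_t)(dividend >> 31) >> (32u - k);
  uint32_t masked = ((uint32_t)dividend + bias) & ((1u << k) - 1u);  // "Ubfx"
  return (int32_t)(masked - bias);                                   // "Sub"
}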
4581 vixl32::Register out = OutputRegister(instruction);
4606 __ Sub(out, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
4611 __ Mls(out, temp1, temp2, dividend);
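Two details of the magic-number division path above are easy to miss: Sub out, temp1, Operand(temp1, ASR, 31) adds one to the quotient exactly when it is negative (it subtracts -1), and MLS computes ra - rn * rm, so a single instruction turns quotient and divisor back into the remainder. Sketched (assuming arithmetic right shift):

#include <cstdint>

// t - (t >> 31) == t + (t < 0 ? 1 : 0).
int32_t FixupQuotient(int32_t t) {
  return t - (t >> 31);          // "Sub out, temp1, temp1, ASR #31"
}

// MLS out, q, d, dividend computes dividend - q * d, the remainder.
int32_t RemainderFromQuotient(int32_t dividend, int32_t q, int32_t d) {
  return dividend - q * d;       // "Mls out, temp1, temp2, dividend"
}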
4730 DCHECK(LowRegisterFrom(div->GetLocations()->Out()).Is(r0));
4731 DCHECK(HighRegisterFrom(div->GetLocations()->Out()).Is(r1));
4934 vixl32::Register out = OutputRegister(ror);
4944 __ Ror(out, in, rot);
4945 } else if (!out.Is(in)) {
4946 __ Mov(out, in);
4949 __ Ror(out, in, RegisterFrom(rhs));
4962 vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
4963 vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());
4976 // Rotate, or mov to out for zero or word size rotations.
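For long rotates the comment above covers the degenerate inputs: a rotate by zero or by a multiple of the word size is just register moves. A portable sketch of the full operation, assuming the usual mod-64 semantics:

#include <cstdint>

// Rotate right by `rot` (mod 64); rot == 0 reduces to a plain move.
uint64_t RorLong(uint64_t in, unsigned rot) {
  rot &= 63u;
  if (rot == 0u) return in;                    // "mov to out"
  return (in >> rot) | (in << (64u - rot));
}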
5111 Location out = locations->Out();
5147 vixl32::Register o_h = HighRegisterFrom(out);
5148 vixl32::Register o_l = LowRegisterFrom(out);
5390 Location out = locations->Out();
5398 __ Mvn(LowRegisterFrom(out), LowRegisterFrom(in));
5399 __ Mvn(HighRegisterFrom(out), HighRegisterFrom(in));
5448 vixl32::Register out = OutputRegister(compare);
5462 // Emit move to `out` before the `Cmp`, as `Mov` might affect the status flags.
5463 __ Mov(out, 0);
5472 // Emit move to `out` before the last `Cmp`, as `Mov` might affect the status flags.
5473 __ Mov(out, 0);
5480 __ Mov(out, 0);
5496 __ Mov(out, 1);
5500 __ Mov(out, -1);
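The moves above materialize the standard -1/0/1 compare result; each `Mov out, 0` is deliberately emitted before its `Cmp` because, as the comments note, `Mov` may pick a flag-setting encoding. The intended result, as a sketch:

#include <cstdint>

int32_t ThreeWayCompare(int64_t lhs, int64_t rhs) {
  if (lhs == rhs) return 0;      // "Mov out, #0"
  return (lhs < rhs) ? -1 : 1;   // "Mov out, #-1" / "Mov out, #1"
}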
5890 Location out = locations->Out();
5898 GetAssembler()->LoadFromOffset(kLoadUnsignedByte, RegisterFrom(out), base, offset);
5902 GetAssembler()->LoadFromOffset(kLoadSignedByte, RegisterFrom(out), base, offset);
5906 GetAssembler()->LoadFromOffset(kLoadSignedHalfword, RegisterFrom(out), base, offset);
5910 GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, RegisterFrom(out), base, offset);
5914 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
5918 // /* HeapReference<Object> */ out = *(base + offset)
5924 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
5929 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
5937 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, locations->InAt(0), offset);
5944 GenerateWideAtomicLoad(base, offset, LowRegisterFrom(out), HighRegisterFrom(out));
5946 GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out), base, offset);
5951 GetAssembler()->LoadSFromOffset(SRegisterFrom(out), base, offset);
5955 vixl32::DRegister out_dreg = DRegisterFrom(out);
6306 Location out_loc = locations->Out();
6401 // /* HeapReference<Object> */ out =
6422 vixl32::Register out = OutputRegister(instruction);
6426 GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
6484 vixl32::SRegister out = SRegisterFrom(out_loc);
6487 GetAssembler()->LoadSFromOffset(out, obj, offset);
6492 GetAssembler()->LoadSFromOffset(out, temp, data_offset);
6821 vixl32::Register out = OutputRegister(instruction);
6826 __ ldr(out, MemOperand(obj, offset));
6829 // Mask out compression flag from String's array length.
6831 __ Lsr(out, out, 1u);
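The Lsr by 1 above assumes ART's compressed-string layout: with string compression enabled, the count field holds the length shifted left by one with the compression flag in the low bit, so dropping that bit recovers the length. A one-line sketch of that decoding:

#include <cstdint>

// count = (length << 1) | compression_flag  =>  length = count >> 1.
uint32_t StringLengthFromCount(uint32_t count) {
  return count >> 1;  // "Lsr out, out, #1"
}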
6845 vixl32::Register out = OutputRegister(instruction);
6850 __ Add(out, first, RegisterFrom(second));
6852 __ Add(out, first, Int32ConstantFrom(second));
7334 Location out_loc = locations->Out();
7335 vixl32::Register out = OutputRegister(cls);
7345 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
7359 codegen_->EmitMovwMovtPlaceholder(labels, out);
7367 __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
7373 : out;
7382 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
7385 // /* GcRoot<mirror::Class> */ out = *out
7386 GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, read_barrier_option);
7401 __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
7404 GenerateClassInitializationCheck(slow_path, out);
7498 Location out_loc = locations->Out();
7499 vixl32::Register out = OutputRegister(load);
7507 codegen_->EmitMovwMovtPlaceholder(labels, out);
7514 __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
7521 : out;
7529 __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
7534 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
7537 // /* GcRoot<mirror::String> */ out = *out
7538 GenerateGcRootFieldLoad(load, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
7564 vixl32::Register out = OutputRegister(load);
7565 GetAssembler()->LoadFromOffset(kLoadWord, out, tr, GetExceptionTlsOffset());
7640 // The "out" register is used as a temporary, so it overlaps with the inputs.
7655 Location out_loc = locations->Out();
7656 vixl32::Register out = OutputRegister(instruction);
7671 DCHECK(!out.Is(obj));
7672 __ Mov(out, 0);
7678 // /* HeapReference<Class> */ out = obj->klass_
7686 __ Cmp(out, cls);
7689 __ Mov(LeaveFlags, out, 0);
7694 if (out.IsLow()) {
7701 __ mov(eq, out, 1);
7704 __ Mov(out, 1);
7711 // /* HeapReference<Class> */ out = obj->klass_
7722 // /* HeapReference<Class> */ out = out->super_class_
7728 // If `out` is null, we use it for the result, and jump to the final label.
7729 __ CompareAndBranchIfZero(out, final_label, /* far_target */ false);
7730 __ Cmp(out, cls);
7732 __ Mov(out, 1);
7737 // /* HeapReference<Class> */ out = obj->klass_
7747 __ Cmp(out, cls);
7749 // /* HeapReference<Class> */ out = out->super_class_
7757 __ Cmp(out, 1);
7763 if (out.IsLow()) {
7764 // If `out` is null, we use it for the result, and the condition flags
7767 // overwriting `out`).
7779 __ mov(eq, out, 1);
7781 // If `out` is null, we use it for the result, and jump to the final label.
7784 __ Mov(out, 1);
7791 // /* HeapReference<Class> */ out = obj->klass_
7800 __ Cmp(out, cls);
7803 // /* HeapReference<Class> */ out = out->component_type_
7809 // If `out` is null, we use it for the result, and jump to the final label.
7810 __ CompareAndBranchIfZero(out, final_label, /* far_target */ false);
7811 GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
7813 __ Cmp(out, 0);
7816 __ Mov(LeaveFlags, out, 0);
7821 if (out.IsLow()) {
7830 __ mov(eq, out, 1);
7834 __ Mov(out, 1);
7842 // /* HeapReference<Class> */ out = obj->klass_
7849 __ Cmp(out, cls);
7855 __ Mov(out, 1);
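The class-hierarchy path above loops out = out->super_class_ until it either matches cls or runs off the chain, and the array path does the same through component_type_; the "If `out` is null, we use it for the result" comments work because a null out is exactly the false answer. A minimal sketch of the loop with a hypothetical Class stand-in:

// `Class` stands in for art::mirror::Class with only the one field
// the emitted loop dereferences.
struct Class {
  const Class* super_class_;
};

bool IsSubclassOf(const Class* out, const Class* cls) {
  while (out != nullptr) {          // null `out` doubles as result 0
    if (out == cls) return true;    // "Cmp out, cls"
    out = out->super_class_;        // "out = out->super_class_"
  }
  return false;
}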
8201 Location out = locations->Out();
8206 vixl32::Register out_reg = RegisterFrom(out);
8229 vixl32::Register out_low = LowRegisterFrom(out);
8230 vixl32::Register out_high = HighRegisterFrom(out);
8291 DCHECK(!LowRegisterFrom(locations->Out()).Is(second));
8293 locations->Out(),
8305 void InstructionCodeGeneratorARMVIXL::GenerateAndConst(vixl32::Register out,
8310 if (!out.Is(first)) {
8311 __ Mov(out, first);
8316 __ Mov(out, 0);
8320 __ And(out, first, value);
8322 __ Bic(out, first, ~value);
8325 __ Ubfx(out, first, 0, WhichPowerOf2(value + 1));
8330 void InstructionCodeGeneratorARMVIXL::GenerateOrrConst(vixl32::Register out,
8335 if (!out.Is(first)) {
8336 __ Mov(out, first);
8341 __ Mvn(out, 0);
8345 __ Orr(out, first, value);
8348 __ Orn(out, first, ~value);
8353 void InstructionCodeGeneratorARMVIXL::GenerateEorConst(vixl32::Register out,
8358 if (!out.Is(first)) {
8359 __ Mov(out, first);
8363 __ Eor(out, first, value);
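GenerateAndConst above picks among three encodings: a plain AND when the immediate encodes directly, BIC with ~value when the complement encodes instead, and UBFX when the mask is a contiguous run of low bits, which the WhichPowerOf2(value + 1) call presupposes. A small predicate for that last case (name hypothetical):

#include <cstdint>

// A contiguous low-bit mask is exactly a value of the form 2^n - 1,
// i.e. value + 1 is a power of two (e.g. 0x0000ffff).
bool IsLowBitMask(uint32_t value) {
  return value != 0u && (value & (value + 1u)) == 0u;
}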
8366 void InstructionCodeGeneratorARMVIXL::GenerateAddLongConst(Location out,
8369 vixl32::Register out_low = LowRegisterFrom(out);
8370 vixl32::Register out_high = HighRegisterFrom(out);
8397 Location out = locations->Out();
8418 vixl32::Register out_low = LowRegisterFrom(out);
8419 vixl32::Register out_high = HighRegisterFrom(out);
8453 vixl32::Register out_low = LowRegisterFrom(out);
8454 vixl32::Register out_high = HighRegisterFrom(out);
8471 Location out,
8475 vixl32::Register out_reg = RegisterFrom(out);
8481 // /* HeapReference<Object> */ out = *(out + offset)
8483 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
8486 // Save the value of `out` into `maybe_temp` before overwriting it
8490 // /* HeapReference<Object> */ out = *(out + offset)
8492 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
8496 // /* HeapReference<Object> */ out = *(out + offset)
8504 Location out,
8509 vixl32::Register out_reg = RegisterFrom(out);
8516 // /* HeapReference<Object> */ out = *(obj + offset)
8518 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
8521 // /* HeapReference<Object> */ out = *(obj + offset)
8523 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
8527 // /* HeapReference<Object> */ out = *(obj + offset)
8963 Location out,
8973 // reference will be carried out by the runtime within the slow
8982 ReadBarrierForHeapReferenceSlowPathARMVIXL(instruction, out, ref, obj, offset, index);
8990 Location out,
9001 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
9003 GetAssembler()->UnpoisonHeapReference(RegisterFrom(out));
9008 Location out,
9017 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARMVIXL(instruction, out, root);
9532 vixl32::Register out) {
9538 __ movw(out, /* placeholder */ 0u);
9540 __ movt(out, /* placeholder */ 0u);
9542 __ add(out, out, pc);
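Once the two placeholders are patched, the movw/movt/add triple above materializes a PC-relative address: movw writes the low 16 bits of the offset, movt the high 16, and the add folds in the PC. A sketch of the patched result, assuming pc_value is what the ADD reads as PC (the instruction address plus 4 in Thumb state, which the patcher accounts for):

#include <cstdint>

// out = pc_value + ((hi16 << 16) | lo16) after patching.
uint32_t PatchedPcRelativeAddress(uint32_t pc_value, uint16_t lo16, uint16_t hi16) {
  uint32_t offset = ((uint32_t)hi16 << 16) | lo16;  // movw + movt halves
  return pc_value + offset;                         // "add out, out, pc"
}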