
Lines Matching defs:out

106 Location out = invoke_->GetLocations()->Out();
107 if (out.IsValid()) {
108   DCHECK(out.IsRegister()); // TODO: Replace this when we support output in memory.
109   DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
110   codegen->MoveFromReturnRegister(out, invoke_->GetType());
280 Location output = locations->Out();
290 Location output = locations->Out();
348 vixl32::Register out = RegisterFrom(locations->Out());
357 __ Clz(out, in_reg_hi);
359 __ Clz(out, in_reg_lo);
360 __ Add(out, out, 32);
365 __ Clz(out, RegisterFrom(in));
396 vixl32::Register out = RegisterFrom(locations->Out());
403 __ Rbit(out, in_reg_lo);
404 __ Clz(out, out);
406 __ Rbit(out, in_reg_hi);
407 __ Clz(out, out);
408 __ Add(out, out, 32);
414 __ Rbit(out, in);
415 __ Clz(out, out);
477 Location output = locations->Out();
525 Location out_loc = invoke->GetLocations()->Out();
535 vixl32::SRegister out = OutputSRegister(invoke);
542 DCHECK(op1.Is(out));
555 __ vmov(cond, F32, out, op2);
568 __ Vmov(out, temp1);
574 __ Vmov(out, temp1);
612 Location out_loc = invoke->GetLocations()->Out();
622 vixl32::DRegister out = OutputDRegister(invoke);
626 DCHECK(op1.Is(out));
639 __ vmov(cond, F64, out, op2);
646 __ Vand(F64, out, op1, op2);
652 __ Vorr(F64, out, op1, op2); // assemble op1/-0.0/NaN.
678 Location out_loc = invoke->GetLocations()->Out();
742 vixl32::Register out = OutputRegister(invoke);
752 __ mov(is_min ? lt : gt, out, op1);
753 __ mov(is_min ? ge : le, out, op2);
885 vixl32::Register lo = LowRegisterFrom(invoke->GetLocations()->Out());
886 vixl32::Register hi = HighRegisterFrom(invoke->GetLocations()->Out());
978 Location trg_loc = locations->Out();
1215 bool value_can_be_null = true; // TODO: Worth finding out this information?
1323 Location out_loc = locations->Out();
1324 vixl32::Register out = OutputRegister(invoke); // Boolean result.
1343 bool value_can_be_null = true; // TODO: Worth finding out this information?
1404 __ Rsbs(out, tmp, 1);
1412 __ mov(cc, out, 0);
1474 vixl32::Register out = OutputRegister(invoke);
1506 __ Subs(out, str, arg);
1521 // out = length diff.
1522 __ Subs(out, temp0, temp1);
1617 // the remaining string data, so just return length diff (out).
1632 __ Lsr(out, temp_reg, temp1); // Extract first character.
1634 __ And(out, out, temp3);
1638 __ Lsr(out, temp_reg, temp1);
1640 __ Movt(out, 0);
1643 __ Sub(out, out, temp2);
1694 __ Sub(out, temp_reg, temp3);
1706 __ rsb(cc, out, out, 0);
1738 vixl32::Register out = OutputRegister(invoke);
1812 __ Ldr(out, MemOperand(str, temp1));
1815 __ Cmp(out, temp2);
1824 __ Mov(out, 1);
1829 __ Mov(out, 0);
2166 // Bail out if the source is null.
2171 // Bail out if the destination is null.
2175 // If the length is negative, bail out.
2213 // Bail out if the source is not a non-primitive array.
2231 // Bail out if the destination is not a non-primitive array.
2293 // Bail out if the destination is not a non-primitive array.
2305 // Bail out if the source is not a non-primitive array.
2338 // Bail out if the source is not a non-primitive array.
2428 // rb_state. We do that by shifting the bit out of the lock word with LSRS
2433 // Carry flag is the last bit shifted out by LSRS.
2487 // the input and output locations are unallocated, the register allocator runs out of
2513 // the input and output locations are unallocated, the register allocator runs out of
2739 vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
2740 vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());
2769 vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
2770 vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());
2879 // Early out for valid zero-length retrievals.
2964 const vixl32::Register out = OutputRegister(invoke);
2969 __ Vmov(out, InputSRegisterAt(invoke, 0));
2971 __ Lsl(out, out, 1);
2972 __ Eor(out, out, infinity);
2973 codegen_->GenerateConditionWithZero(kCondEQ, out, out);
2982 const vixl32::Register out = OutputRegister(invoke);
2994 __ Vmov(temp, out, InputDRegisterAt(invoke, 0));
2995 __ Eor(out, out, infinity_high);
2996 __ Eor(out, out, infinity_high2);
2998 __ Orr(out, temp, Operand(out, vixl32::LSL, 1));
2999 codegen_->GenerateConditionWithZero(kCondEQ, out, out);
3040 vixl32::Register out = RegisterFrom(locations->Out());
3053 __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
3064 assembler->StoreToOffset(kStoreWord, temp, out, info.value_offset);
3072 __ Add(out, in, -info.low);
3073 __ Cmp(out, info.high - info.low + 1);
3080 codegen_->LoadFromShiftedRegOffset(Primitive::kPrimNot, locations->Out(), temp, out);
3081 assembler->MaybeUnpoisonHeapReference(out);
3089 assembler->StoreToOffset(kStoreWord, in, out, info.value_offset);
3106 vixl32::Register out = RegisterFrom(invoke->GetLocations()->Out());
3108 __ Ldr(out, MemOperand(tr, offset));
3113 __ CompareAndBranchIfZero(out, final_label, /* far_target */ false);
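
The Rbit/Clz pairs matched above (lines 403-415) rely on the identity that, for a nonzero value, the leading-zero count of the bit-reversed word equals the trailing-zero count of the original word. A minimal host-side sketch of that identity, not taken from the listed file; ReverseBits32 and Clz32 below are hypothetical stand-ins for the ARM RBIT and CLZ instructions:

    #include <cassert>
    #include <cstdint>

    // Hypothetical software stand-in for RBIT: reverse the 32 bits of x.
    static uint32_t ReverseBits32(uint32_t x) {
      uint32_t r = 0;
      for (int i = 0; i < 32; ++i) {
        r = (r << 1) | ((x >> i) & 1u);
      }
      return r;
    }

    // Hypothetical software stand-in for CLZ: count leading zero bits.
    static uint32_t Clz32(uint32_t x) {
      uint32_t n = 0;
      for (uint32_t mask = 1u << 31; mask != 0 && (x & mask) == 0; mask >>= 1) {
        ++n;
      }
      return n;
    }

    int main() {
      // For nonzero x, CLZ(RBIT(x)) is the trailing-zero count of x, which is
      // the value the matched Rbit/Clz sequences leave in `out`.
      assert(Clz32(ReverseBits32(1u)) == 0u);            // 1 has 0 trailing zeros.
      assert(Clz32(ReverseBits32(0x80u)) == 7u);         // 0x80 has 7 trailing zeros.
      assert(Clz32(ReverseBits32(0x80000000u)) == 31u);  // Top bit set: 31 trailing zeros.
      return 0;
    }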