Lines Matching defs:out (definition/use search for "out" in the ART optimizing compiler's x86 code generator; the leading numbers are source line numbers)
211 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
225 x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
265 Location out = locations->Out();
266 if (out.IsValid()) {
267 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
268 x86_codegen->Move32(out, Location::RegisterLocation(EAX));
302 : locations->Out();
304 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
342 x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
428 ReadBarrierMarkSlowPathX86(HInstruction* instruction, Location out, Location obj)
429 : SlowPathCode(instruction), out_(out), obj_(obj) {
478 Location out,
484 out_(out),
490 // If `obj` is equal to `out` or `ref`, it means the initial object
494 // __ movl(out, Address(out, offset));
499 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
653 ReadBarrierForRootSlowPathX86(HInstruction* instruction, Location out, Location root)
654 : SlowPathCode(instruction), out_(out), root_(root) {
1548 DCHECK(locations->InAt(0).Equals(locations->Out()));
1556 // Figure out how to test the 'condition'.
1611 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1675 Register reg = locations->Out().AsRegister<Register>();
2084 Location out = locations->Out();
2089 DCHECK(in.Equals(out));
2090 __ negl(out.AsRegister<Register>());
2095 DCHECK(in.Equals(out));
2096 __ negl(out.AsRegisterPairLow<Register>());
2102 __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
2103 __ negl(out.AsRegisterPairHigh<Register>());
2107 DCHECK(in.Equals(out));
2115 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
2120 DCHECK(in.Equals(out));
2126 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
2147 Location out = locations->Out();
2148 DCHECK(locations->InAt(0).Equals(out));
2154 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
2157 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
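
The block above (source lines 2084-2157) implements arithmetic and floating-point negation: negl for a 32-bit register, the negl/adcl/negl sequence for a 64-bit register pair, and xorps/xorpd against a sign-bit mask for float and double. As a minimal sketch in portable C++ (illustrative only, not ART code), the mask trick amounts to flipping the IEEE-754 sign bit:

    #include <cstdint>
    #include <cstring>

    // Sketch only: what the xorps/xorpd-with-mask idiom computes. XORing the
    // sign bit negates an IEEE-754 value without touching exponent or mantissa.
    float NegateF(float f) {
      uint32_t bits;
      std::memcpy(&bits, &f, sizeof(bits));
      bits ^= 0x80000000u;               // xorps with a 0x80000000 mask
      std::memcpy(&f, &bits, sizeof(f));
      return f;
    }

    double NegateD(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof(bits));
      bits ^= 0x8000000000000000ull;     // xorpd with the 64-bit sign mask
      std::memcpy(&d, &bits, sizeof(d));
      return d;
    }
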
2381 Location out = locations->Out();
2392 __ movsxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2396 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2406 __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
2410 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2425 __ movsxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2427 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2431 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
2441 __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
2443 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2447 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
2462 __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2464 __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2469 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
2476 Register output = out.AsRegister<Register>();
2501 Register output = out.AsRegister<Register>();
2538 DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
2539 DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
2573 __ movzxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2575 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2579 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
2589 __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
2591 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2595 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
2614 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
2624 if (!in.IsDoubleStackSlot() || !out.IsStackSlot()) {
2632 if (out.IsStackSlot()) {
2633 __ fstps(Address(ESP, out.GetStackIndex() + adjustment));
2637 codegen_->Move32(out, stack_temp);
2649 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2667 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
2677 if (!in.IsDoubleStackSlot() || !out.IsDoubleStackSlot()) {
2685 if (out.IsDoubleStackSlot()) {
2686 __ fstpl(Address(ESP, out.GetStackIndex() + adjustment));
2690 codegen_->Move64(out, stack_temp);
2702 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
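
Source lines 2381-2702 above are the primitive type conversions: movsxb/movsxw sign-extend byte and short, movzxw zero-extends char, movl narrows a long by taking its low word, cvtsi2ss/cvtsi2sd and cvtsd2ss/cvtss2sd handle the SSE conversions, and the long-to-FP paths spill through a stack slot and store the x87 result with fstps/fstpl. A minimal sketch of the extension semantics in plain C++ (illustrative only):

    #include <cassert>
    #include <cstdint>

    // Sketch only: C++ equivalents of the extending moves above.
    int32_t ByteToInt(int32_t in)  { return static_cast<int8_t>(in); }    // movsxb: sign-extend
    int32_t ShortToInt(int32_t in) { return static_cast<int16_t>(in); }   // movsxw: sign-extend
    int32_t CharToInt(int32_t in)  { return static_cast<uint16_t>(in); }  // movzxw: zero-extend

    int main() {
      assert(ByteToInt(0xFF) == -1);        // high bit of the byte propagates
      assert(ShortToInt(0x8000) == -32768);
      assert(CharToInt(0xFFFF) == 0xFFFF);  // Java char stays unsigned
      return 0;
    }
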
2759 Location out = locations->Out();
2764 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2765 __ addl(out.AsRegister<Register>(), second.AsRegister<Register>());
2766 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2767 __ addl(out.AsRegister<Register>(), first.AsRegister<Register>());
2769 __ leal(out.AsRegister<Register>(), Address(
2774 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2775 __ addl(out.AsRegister<Register>(), Immediate(value));
2777 __ leal(out.AsRegister<Register>(), Address(first.AsRegister<Register>(), value));
2780 DCHECK(first.Equals(locations->Out()));
2876 DCHECK(first.Equals(locations->Out()));
2992 Location out = locations->Out();
3000 __ imull(out.AsRegister<Register>(), first.AsRegister<Register>(), imm);
3002 DCHECK(first.Equals(out));
3006 DCHECK(first.Equals(out));
3094 DCHECK(first.Equals(locations->Out()));
3112 DCHECK(first.Equals(locations->Out()));
3182 Location out = locations->Out();
3218 DCHECK(out.IsFpuRegister()) << out;
3220 __ movss(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3222 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3237 Register out_register = locations->Out().AsRegister<Register>();
3257 Register out_register = locations->Out().AsRegister<Register>();
3285 Register out = locations->Out().AsRegister<Register>();
3293 edx = locations->Out().AsRegister<Register>();
3300 DCHECK_EQ(EAX, out);
3302 DCHECK_EQ(EDX, out);
3315 __ xorl(out, out);
3361 Location out = locations->Out();
3369 DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());
3386 instruction, out.AsRegister<Register>(), is_div);
3412 DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
3413 DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
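
Source lines 3182-3413 above generate integer division and remainder. The DCHECK_EQ lines record the register convention the code relies on: the 32-bit quotient is pinned to EAX and the remainder to EDX, and the 64-bit result pair from the runtime call likewise lands in EAX/EDX. A minimal sketch of the invariant the generated code produces (ordinary C++ division truncates toward zero, matching idiv):

    #include <cassert>

    int main() {
      int a = -7, b = 2;
      int q = a / b;               // generated code leaves the quotient in EAX
      int r = a % b;               // generated code leaves the remainder in EDX
      assert(q == -3 && r == -1);  // truncation toward zero; remainder keeps a's sign
      assert(a == q * b + r);      // the defining identity
      return 0;
    }
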
3702 DCHECK(first.Equals(locations->Out()));
4074 __ movl(locations->Out().AsRegister<Register>(),
4079 __ movl(locations->Out().AsRegister<Register>(),
4083 __ movl(locations->Out().AsRegister<Register>(),
4084 Address(locations->Out().AsRegister<Register>(), method_offset));
4098 Location out = locations->Out();
4099 DCHECK(in.Equals(out));
4102 __ notl(out.AsRegister<Register>());
4106 __ notl(out.AsRegisterPairLow<Register>());
4107 __ notl(out.AsRegisterPairHigh<Register>());
4125 Location out = locations->Out();
4126 DCHECK(in.Equals(out));
4127 __ xorl(out.AsRegister<Register>(), Immediate(1));
4165 Register out = locations->Out().AsRegister<Register>();
4233 __ movl(out, Immediate(0));
4238 __ movl(out, Immediate(1));
4242 __ movl(out, Immediate(-1));
4401 // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
4584 Location out = locations->Out();
4591 __ movzxb(out.AsRegister<Register>(), Address(base, offset));
4596 __ movsxb(out.AsRegister<Register>(), Address(base, offset));
4601 __ movsxw(out.AsRegister<Register>(), Address(base, offset));
4606 __ movzxw(out.AsRegister<Register>(), Address(base, offset));
4611 __ movl(out.AsRegister<Register>(), Address(base, offset));
4615 // /* HeapReference<Object> */ out = *(base + offset)
4621 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4626 __ movl(out.AsRegister<Register>(), Address(base, offset));
4634 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4644 __ movd(out.AsRegisterPairLow<Register>(), temp);
4646 __ movd(out.AsRegisterPairHigh<Register>(), temp);
4648 DCHECK_NE(base, out.AsRegisterPairLow<Register>());
4649 __ movl(out.AsRegisterPairLow<Register>(), Address(base, offset));
4651 __ movl(out.AsRegisterPairHigh<Register>(), Address(base, kX86WordSize + offset));
4657 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4662 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
5044 Location out_loc = locations->Out();
5050 Register out = out_loc.AsRegister<Register>();
5052 __ movzxb(out, Address(obj,
5055 __ movzxb(out, Address(obj, index.AsRegister<Register>(), TIMES_1, data_offset));
5062 Register out = out_loc.AsRegister<Register>();
5064 __ movsxb(out, Address(obj,
5067 __ movsxb(out, Address(obj, index.AsRegister<Register>(), TIMES_1, data_offset));
5074 Register out = out_loc.AsRegister<Register>();
5076 __ movsxw(out, Address(obj,
5079 __ movsxw(out, Address(obj, index.AsRegister<Register>(), TIMES_2, data_offset));
5086 Register out = out_loc.AsRegister<Register>();
5088 __ movzxw(out, Address(obj,
5091 __ movzxw(out, Address(obj, index.AsRegister<Register>(), TIMES_2, data_offset));
5098 Register out = out_loc.AsRegister<Register>();
5100 __ movl(out, Address(obj,
5103 __ movl(out, Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset));
5113 // /* HeapReference<Object> */ out =
5122 Register out = out_loc.AsRegister<Register>();
5126 __ movl(out, Address(obj, offset));
5133 __ movl(out, Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset));
5165 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
5167 __ movss(out, Address(obj,
5170 __ movss(out, Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset));
5177 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
5179 __ movsd(out, Address(obj,
5182 __ movsd(out, Address(obj, index.AsRegister<Register>(), TIMES_8, data_offset));
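
Source lines 5044-5182 above are the array-element loads: each element type pairs a sized load (movzxb, movsxb, movsxw, movzxw, movl, movss, movsd) with a scaled-index operand of the form (obj, index, TIMES_n, data_offset). A minimal sketch of that address arithmetic, using a hypothetical helper and a made-up data offset rather than ART's real layout constants:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Sketch only: the base + displacement + index * scale computation that the
    // TIMES_1/2/4/8 operands encode. data_offset stands in for the offset of the
    // element data inside the array object (hypothetical value below).
    template <typename T>
    uintptr_t ElementAddress(uintptr_t array, size_t data_offset, size_t index) {
      return array + data_offset + index * sizeof(T);
    }

    int main() {
      // A 4-byte element (int) at index 3 behind a 12-byte data offset.
      assert(ElementAddress<int32_t>(0x1000, 12, 3) == 0x1000 + 12 + 3 * 4);
      return 0;
    }
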
5337 // restored afterwards). So in this case, we bail out and
5503 Register out = locations->Out().AsRegister<Register>();
5504 __ movl(out, Address(obj, offset));
5899 Location out_loc = locations->Out();
5900 Register out = out_loc.AsRegister<Register>();
5906 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5910 // /* GcRoot<mirror::Class>[] */ out =
5912 __ movl(out, Address(current_method,
5914 // /* GcRoot<mirror::Class> */ out = out[type_index]
5916 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
5925 __ testl(out, out);
5930 GenerateClassInitializationCheck(slow_path, out);
6022 Location out_loc = locations->Out();
6023 Register out = out_loc.AsRegister<Register>();
6028 __ movl(out, Immediate(/* placeholder */ 0));
6035 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
6043 __ movl(out, Immediate(address));
6064 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6068 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
6069 __ movl(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value()));
6070 // /* GcRoot<mirror::String> */ out = out[string_index]
6072 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
6083 __ testl(out, out);
6100 __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), GetExceptionTlsAddress());
6155 // Note that TypeCheckSlowPathX86 uses this "out" register too.
6170 Location out_loc = locations->Out();
6171 Register out = out_loc.AsRegister<Register>();
6189 // /* HeapReference<Class> */ out = obj->klass_
6195 __ cmpl(out, cls.AsRegister<Register>());
6198 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6203 __ movl(out, Immediate(1));
6213 // /* HeapReference<Class> */ out = out->super_class_
6215 __ testl(out, out);
6216 // If `out` is null, we use it for the result, and jump to `done`.
6219 __ cmpl(out, cls.AsRegister<Register>());
6222 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6225 __ movl(out, Immediate(1));
6237 __ cmpl(out, cls.AsRegister<Register>());
6240 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6243 // /* HeapReference<Class> */ out = out->super_class_
6245 __ testl(out, out);
6247 // If `out` is null, we use it for the result, and jump to `done`.
6250 __ movl(out, Immediate(1));
6261 __ cmpl(out, cls.AsRegister<Register>());
6264 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6268 // /* HeapReference<Class> */ out = out->component_type_
6270 __ testl(out, out);
6271 // If `out` is null, we use it for the result, and jump to `done`.
6273 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
6276 __ movl(out, Immediate(1));
6283 __ cmpl(out, cls.AsRegister<Register>());
6286 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6293 __ movl(out, Immediate(1));
6334 __ xorl(out, out);
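
Source lines 6155-6334 above belong to the instanceof/type-check paths: compare the loaded class against cls directly, otherwise walk out = out->super_class_ (or inspect out->component_type_ for the array case), write 1 on a match, and fall through to xorl out, out for false. A minimal sketch of the superclass walk, with a hypothetical Class struct standing in for mirror::Class:

    // Sketch only: the loop structure behind the class-hierarchy check above.
    struct Class { const Class* super_class; };

    bool IsSubclassOf(const Class* klass, const Class* target) {
      while (klass != nullptr) {           // testl out, out; exit with 0 when null
        if (klass == target) return true;  // cmpl out, cls; movl out, 1 on match
        klass = klass->super_class;        // out = out->super_class_
      }
      return false;
    }

    int main() {
      Class object{nullptr};
      Class string{&object};
      return IsSubclassOf(&string, &object) ? 0 : 1;
    }
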
6613 DCHECK(first.Equals(locations->Out()));
6717 Location out,
6720 Register out_reg = out.AsRegister<Register>();
6725 // /* HeapReference<Object> */ out = *(out + offset)
6727 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
6730 // Save the value of `out` into `maybe_temp` before overwriting it
6734 // /* HeapReference<Object> */ out = *(out + offset)
6736 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6740 // /* HeapReference<Object> */ out = *(out + offset)
6747 Location out,
6751 Register out_reg = out.AsRegister<Register>();
6757 // /* HeapReference<Object> */ out = *(obj + offset)
6759 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
6762 // /* HeapReference<Object> */ out = *(obj + offset)
6764 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6768 // /* HeapReference<Object> */ out = *(obj + offset)
6942 out,
6952 // reference will be carried out by the runtime within the slow
6961 ReadBarrierForHeapReferenceSlowPathX86(instruction, out, ref, obj, offset, index);
6969 Location out,
6980 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
6982 __ UnpoisonHeapReference(out.AsRegister<Register>());
6987 Location out,
6996 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86(instruction, out, root);
7025 // Figure out the correct compare values and jump conditions.
7145 Register reg = locations->Out().AsRegister<Register>();
7193 Location out = locations->Out();
7199 __ movss(out.AsFpuRegister<XmmRegister>(),
7204 __ movsd(out.AsFpuRegister<XmmRegister>(),
7209 __ movl(out.AsRegister<Register>(),