Lines Matching defs:in
5 * you may not use this file except in compliance with the License.
10 * Unless required by applicable law or agreed to in writing, software
65 // Live registers will be restored in the catch block if caught.
91 // Live registers will be restored in the catch block if caught.
193 // Live registers will be restored in the catch block if caught.
468 << "Unexpected instruction in read barrier marking slow path: "
518 // In that case, we have lost the information about the original
533 << "Unexpected instruction in read barrier for heap reference slow path: "
546 // Compute real offset and store it in index_.
577 // The initial register stored in `index_` has already been
578 // saved in the call to art::SlowPathCode::SaveLiveRegisters
582 // Shifting the index value contained in `index_reg` by the
583 // scale factor (2) cannot overflow in practice, as the
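A minimal sketch of the offset computation these slow-path comments describe (not taken from the file; the function name is illustrative and 4-byte heap references are assumed, hence the scale factor of 2):

#include <cstdint>

uint32_t ElementByteOffset(uint32_t data_offset, uint32_t index) {
  // Heap references are 4 bytes wide, so the element offset is index << 2.
  // Per the comment above, this shift cannot overflow in practice for any
  // array the runtime can actually allocate.
  return data_offset + (index << 2);
}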
683 << "Unexpected instruction in read barrier for GC root slow path: "
769 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
852 // intrinsics may have put the receiver in a different register. In the intrinsics
864 // However this is not required in practice, as this is an
868 // concurrent copying collector may not in the future).
1634 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
1644 // Handle the long/FP comparisons made in instruction simplification.
2242 // However this is not required in practice, as this is an
2246 // concurrent copying collector may not in the future).
2289 Location in = locations->InAt(0);
2292 DCHECK(in.IsRegister());
2293 DCHECK(in.Equals(out));
2298 DCHECK(in.IsRegister());
2299 DCHECK(in.Equals(out));
2304 DCHECK(in.Equals(out));
2315 DCHECK(in.Equals(out));
2531 Location in = locations->InAt(0);
2546 if (in.IsRegister()) {
2547 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2548 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2550 Address(CpuRegister(RSP), in.GetStackIndex()));
2553 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
2573 if (in.IsRegister()) {
2574 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2575 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2577 Address(CpuRegister(RSP), in.GetStackIndex()));
2580 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
2594 if (in.IsRegister()) {
2595 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2596 } else if (in.IsDoubleStackSlot()) {
2598 Address(CpuRegister(RSP), in.GetStackIndex()));
2600 DCHECK(in.IsConstant());
2601 DCHECK(in.GetConstant()->IsLongConstant());
2602 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2609 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2631 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2667 DCHECK(in.IsRegister());
2668 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2673 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2695 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2731 if (in.IsRegister()) {
2732 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2733 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2735 Address(CpuRegister(RSP), in.GetStackIndex()));
2738 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
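The movsxb/movsxw/movzxw conversions hit above correspond to Java's byte, short and char narrowing semantics. A small sketch of the equivalent C++ casts, for orientation only:

#include <cstdint>

int32_t ToByte(int32_t v)  { return static_cast<int8_t>(v);   }  // movsxb: sign-extend low 8 bits
int32_t ToShort(int32_t v) { return static_cast<int16_t>(v);  }  // movsxw: sign-extend low 16 bits
int32_t ToChar(int32_t v)  { return static_cast<uint16_t>(v); }  // movzxw: zero-extend low 16 bits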
2757 if (in.IsRegister()) {
2758 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2759 } else if (in.IsConstant()) {
2760 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2765 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2771 if (in.IsRegister()) {
2772 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2773 } else if (in.IsConstant()) {
2774 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2779 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2785 if (in.IsFpuRegister()) {
2786 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2787 } else if (in.IsConstant()) {
2788 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2793 Address(CpuRegister(RSP), in.GetStackIndex()));
2812 if (in.IsRegister()) {
2813 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2814 } else if (in.IsConstant()) {
2815 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2820 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2826 if (in.IsRegister()) {
2827 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2828 } else if (in.IsConstant()) {
2829 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2834 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2840 if (in.IsFpuRegister()) {
2841 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2842 } else if (in.IsConstant()) {
2843 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2848 Address(CpuRegister(RSP), in.GetStackIndex()));
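Likewise, the SSE conversion instructions used in this block map onto ordinary numeric casts. A sketch of that mapping, not taken from the file itself:

#include <cstdint>

float  IntToFloat(int32_t v)   { return static_cast<float>(v);  }  // cvtsi2ss
double LongToDouble(int64_t v) { return static_cast<double>(v); }  // cvtsi2sd
float  DoubleToFloat(double v) { return static_cast<float>(v);  }  // cvtsd2ss
double FloatToDouble(float v)  { return static_cast<double>(v); }  // cvtss2sd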
2877 // We can use a leaq or addq if the constant can fit in an immediate.
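On x86-64, leaq displacements and addq immediates are sign-extended 32-bit values, so "fits in an immediate" means the 64-bit constant round-trips through int32_t. A sketch of that check, with an illustrative name:

#include <cstdint>
#include <limits>

bool FitsInImmediate(int64_t value) {
  // addq/leaq encode a sign-extended 32-bit immediate; anything outside the
  // int32_t range has to be materialized in a register first.
  return value >= std::numeric_limits<int32_t>::min() &&
         value <= std::numeric_limits<int32_t>::max();
}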
3126 // The constant may have ended up in a register, so test explicitly to avoid
3142 // The constant may have ended up in a register, so test explicitly to avoid
3238 // Load the values to the FP stack in reverse order, using temporaries if needed.
3554 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3555 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3635 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3637 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3638 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3747 // The shift count needs to be in CL.
3823 // The shift count needs to be in CL (unless it is a constant).
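x86-64 variable shifts take their count in CL and mask it to the operand width, which is why a constant count can be encoded directly while a variable one is forced into CL. A sketch of the masking semantics (which happen to match Java's shift rules), not ART code:

#include <cstdint>

uint32_t Shl32(uint32_t value, uint8_t count) { return value << (count & 31); }  // count in CL, masked to 5 bits
uint64_t Shl64(uint64_t value, uint8_t count) { return value << (count & 63); }  // 64-bit shifts mask to 6 bits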
4057 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4098 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4145 // Note that a potential implicit null check is handled in this
4187 // Potential implicit null checks, in the case of reference
4188 // fields, are handled in the previous switch statement.
4195 // Memory barriers, in the case of references, are also handled
4196 // in the previous switch statement.
4217 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4224 // In order to satisfy the semantics of volatile, this must be a single instruction store.
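The reason a volatile store must be a single instruction is tearing: if a 64-bit store were split into two 32-bit halves, a concurrent reader could observe a mix of old and new bits. A minimal illustration using std::atomic, which emits one 64-bit store (plus ordering) on x86-64; seq_cst is assumed here for simplicity:

#include <atomic>
#include <cstdint>

std::atomic<int64_t> field{0};

void StoreVolatile(int64_t value) {
  // One 64-bit store, never two 32-bit pieces.
  field.store(value, std::memory_order_seq_cst);
}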
4533 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4617 // Note that a potential implicit null check is handled in this
4687 // Potential implicit null checks, in the case of reference
4688 // arrays, are handled in the previous switch statement.
4827 // restored afterwards). So in this case, we bail out and
4843 // nor the object reference in `register_value->klass`, as
5785 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
5878 // If the class reference currently in `temp` is not null, jump
5886 // going into the slow path, as it has been overwritten in the
5931 // If the class reference currently in `temp` is not null, jump
5938 // going into the slow path, as it has been overwritten in the
5950 // short in some cases when read barriers are enabled. This has
5987 // going into the slow path, as it has been overwritten in the
6194 // in the following move operation, as we will need it for the
6339 // In slow path based read barriers, the read barrier call is
6340 // inserted after the original load. However, in fast path based
6354 // Note: the original implementation in ReadBarrier::Barrier is
6549 // Is the value in range?
6553 // We are in the range of the table.
6554 // Load the address of the jump table in the constant area.
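These three comments outline the packed-switch lowering: bias the value, range-check it, then index a jump table (which the generated code keeps in the constant area). A sketch of the same control flow with illustrative names, covering only the range check and table lookup:

#include <cstdint>

int Dispatch(int32_t value, int32_t lower_bound,
             const int32_t* targets, uint32_t num_entries, int default_target) {
  uint32_t index = static_cast<uint32_t>(value) - static_cast<uint32_t>(lower_bound);
  if (index >= num_entries) {   // Is the value in range?
    return default_target;      // No: take the default handler.
  }
  return targets[index];        // Yes: jump through the table.
}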
6627 // Value won't fit in an int.
6664 // Patch in the right value.
6668 // Location in constant area that the fixup refers to.
6673 * Class to handle late fixup of offsets to a jump table that will be created in the
6789 // Didn't fit in a register. Do it in pieces.
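When a 64-bit constant cannot be encoded as a sign-extended 32-bit immediate, it can still be written to memory as two 32-bit halves, which is the "do it in pieces" case this last comment refers to. A sketch of that idea; the helper name and raw-pointer layout are illustrative assumptions:

#include <cstdint>
#include <cstring>

void StoreInPieces(uint8_t* slot, int64_t value) {
  uint32_t low  = static_cast<uint32_t>(value);
  uint32_t high = static_cast<uint32_t>(value >> 32);
  std::memcpy(slot, &low, sizeof(low));         // low 32 bits first
  std::memcpy(slot + 4, &high, sizeof(high));   // then the high 32 bits
}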