Lines Matching defs:in
5 * you may not use this file except in compliance with the License.
10 * Unless required by applicable law or agreed to in writing, software
67 // Live registers will be restored in the catch block if caught.
186 // Live registers will be restored in the catch block if caught.
459 // barrier. The field `obj.field` in the object `obj` holding this
468 // reference (different from `ref`) in `obj.field`).
498 << "Unexpected instruction in read barrier marking slow path: "
515 // and output in R0):
536 // Should the reference in `ref_` be unpoisoned prior to marking it?
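A self-contained sketch of the invariant the marking slow-path comments above state, with stand-in types and an assumed MarkToSpace helper (the real entrypoint lives in the runtime):

    struct Object;
    // Stub standing in for the runtime marking entrypoint (assumed name).
    static Object* MarkToSpace(Object* ref) { return ref; }

    struct Holder { Object* field; };

    static Object* ReadWithBarrier(Holder* obj) {
      Object* ref = obj->field;  // may still be a from-space reference
      ref = MarkToSpace(ref);    // after marking, `ref` is to-space...
      return ref;                // ...but `obj->field` was not written back
    }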
543 // and if needed, atomically updating the field `obj.field` in the
551 // another object reference (different from `ref`) in `obj.field`).
583 << "Unexpected instruction in read barrier marking and field updating slow path: "
606 // and output in R0):
623 // update the field in the holder (`*field_addr`).
626 // another thread had concurrently changed it. In that case, the
627 // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
640 // reference) to be in EAX. Save RAX beforehand, and move the
641 // expected value (stored in `temp1_`) into EAX.
705 // Should the reference in `ref_` be unpoisoned prior to marking it?
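The field-updating variant above additionally compare-and-sets `obj.field`, and LOCK CMPXCHGL wants the expected value in EAX. A minimal GCC inline-assembly sketch of that register convention (a hypothetical helper, not the runtime's code):

    #include <cstdint>

    static bool CompareAndSetField(uint32_t* field_addr,
                                   uint32_t expected,   // moved into EAX
                                   uint32_t desired) {
      uint32_t prev;
      asm volatile("lock cmpxchgl %2, %1"
                   : "=a"(prev), "+m"(*field_addr)
                   : "r"(desired), "0"(expected)
                   : "cc", "memory");
      // On failure, EAX holds the value another thread raced in.
      return prev == expected;
    }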
737 // In that case, we have lost the information about the original
755 << "Unexpected instruction in read barrier for heap reference slow path: "
768 // Compute real offset and store it in index_.
799 // The initial register stored in `index_` has already been
800 // saved in the call to art::SlowPathCode::SaveLiveRegisters
804 // Shifting the index value contained in `index_reg` by the
805 // scale factor (2) cannot overflow in practice, as the
814 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
816 // (as in the case of ArrayGet), as it is actually an offset
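The heap-reference slow path above recomputes the address of the reference. A sketch of the arithmetic those comments describe, with a hypothetical kDataOffset standing in for the real array header size:

    #include <cstdint>

    constexpr uintptr_t kDataOffset = 12;  // assumed object-array data offset

    static uintptr_t ElementAddress(uintptr_t base, uint32_t index) {
      // Shifting by the scale factor (2) cannot overflow in practice:
      // the index is bounded by the array length, far below 2^30.
      return base + (static_cast<uintptr_t>(index) << 2) + kDataOffset;
    }
    // For UnsafeGetObject/UnsafeGetObjectVolatile the "index" is already
    // a byte offset, so no scaling is applied.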
909 << "Unexpected instruction in read barrier for GC root slow path: "
984 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
1042 // intrinsics may have put the receiver in a different register. In the intrinsics
1054 // However this is not required in practice, as this is an
1058 // concurrent copying collector may not in the future).
1302 // do this in HCurrentMethod, as the instruction might have been removed
1303 // in the SSA graph.
1843 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
1853 // Handle the long/FP comparisons made in instruction simplification.
2428 // However this is not required in practice, as this is an
2432 // concurrent copying collector may not in the future).
2483 Location in = locations->InAt(0);
2486 DCHECK(in.IsRegister());
2487 DCHECK(in.Equals(out));
2492 DCHECK(in.IsRegister());
2493 DCHECK(in.Equals(out));
2498 DCHECK(in.Equals(out));
2509 DCHECK(in.Equals(out));
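The DCHECKs above encode the x86 two-address constraint: NEG reads and writes a single operand, so the register allocator must give the input and the output the same location. A tiny inline-assembly illustration:

    static int32_t Neg32(int32_t v) {
      asm("negl %0" : "+r"(v) : : "cc");  // one operand, read-modify-write
      return v;
    }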
2725 Location in = locations->InAt(0);
2740 if (in.IsRegister()) {
2741 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2742 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2744 Address(CpuRegister(RSP), in.GetStackIndex()));
2747 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
2767 if (in.IsRegister()) {
2768 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2769 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2771 Address(CpuRegister(RSP), in.GetStackIndex()));
2774 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
2788 if (in.IsRegister()) {
2789 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2790 } else if (in.IsDoubleStackSlot()) {
2792 Address(CpuRegister(RSP), in.GetStackIndex()));
2794 DCHECK(in.IsConstant());
2795 DCHECK(in.GetConstant()->IsLongConstant());
2796 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2803 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2825 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2861 DCHECK(in.IsRegister());
2862 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2867 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2889 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2925 if (in.IsRegister()) {
2926 in.AsRegister<CpuRegister>());
2927 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2929 Address(CpuRegister(RSP), in.GetStackIndex()));
2932 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
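The conversion code matched above maps onto plain C++ casts; one line per instruction, as a rough guide to what each one computes:

    #include <cstdint>

    static int32_t ToInt8 (int64_t v) { return static_cast<int8_t>(v);   }  // movsxb
    static int32_t ToInt16(int64_t v) { return static_cast<int16_t>(v);  }  // movsxw
    static int32_t ToInt32(int64_t v) { return static_cast<int32_t>(v);  }  // movl
    static int64_t ToInt64(int32_t v) { return static_cast<int64_t>(v);  }  // movsxd
    static int32_t ToChar (int64_t v) { return static_cast<uint16_t>(v); }  // movzxw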
2951 if (in.IsRegister()) {
2952 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2953 } else if (in.IsConstant()) {
2954 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2959 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2965 if (in.IsRegister()) {
2966 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2967 } else if (in.IsConstant()) {
2968 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2973 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2979 if (in.IsFpuRegister()) {
2980 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2981 } else if (in.IsConstant()) {
2982 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2987 Address(CpuRegister(RSP), in.GetStackIndex()));
3006 if (in.IsRegister()) {
3007 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3008 } else if (in.IsConstant()) {
3009 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3014 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3020 if (in.IsRegister()) {
3021 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3022 } else if (in.IsConstant()) {
3023 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3028 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3034 if (in.IsFpuRegister()) {
3035 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3036 } else if (in.IsConstant()) {
3037 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3042 Address(CpuRegister(RSP), in.GetStackIndex()));
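Likewise for the SSE conversions: each cvt* instruction matched above corresponds to a single C++ cast:

    #include <cstdint>

    static float  I32ToF32(int32_t v) { return static_cast<float>(v);  }  // cvtsi2ss
    static float  I64ToF32(int64_t v) { return static_cast<float>(v);  }  // cvtsi2ss, 64-bit
    static double I32ToF64(int32_t v) { return static_cast<double>(v); }  // cvtsi2sd
    static double I64ToF64(int64_t v) { return static_cast<double>(v); }  // cvtsi2sd, 64-bit
    static float  F64ToF32(double v)  { return static_cast<float>(v);  }  // cvtsd2ss
    static double F32ToF64(float v)   { return static_cast<double>(v); }  // cvtss2sd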
3071 // We can use a leaq or addq if the constant can fit in an immediate.
3320 // The constant may have ended up in a register, so test explicitly to avoid
3336 // The constant may have ended up in a register, so test explicitly to avoid
3432 // Load the values to the FP stack in reverse order, using temporaries if needed.
3737 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3738 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3818 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3820 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3821 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
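The division matches above pin the dividend to RDX:RAX because IDIV leaves the quotient in RAX and the remainder in RDX. A minimal sketch of that convention in GCC inline assembly (an illustration, not the generated code itself):

    #include <cstdint>

    static void DivMod64(int64_t dividend, int64_t divisor,
                         int64_t* quot, int64_t* rem) {
      int64_t q, r;
      asm("cqo\n\t"          // sign-extend RAX into RDX:RAX
          "idivq %[d]"       // quotient -> RAX, remainder -> RDX
          : "=a"(q), "=&d"(r)
          : "0"(dividend), [d] "r"(divisor)
          : "cc");
      *quot = q;
      *rem = r;
    }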
3924 // The shift count needs to be in CL.
4000 // The shift count needs to be in CL (unless it is a constant).
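Variable shifts on x86-64 encode their count implicitly in CL, which is why the matched comments pin non-constant shift counts to RCX. For illustration:

    #include <cstdint>

    static uint64_t ShlVar(uint64_t value, uint8_t count) {
      asm("shlq %%cl, %0" : "+r"(value) : "c"(count) : "cc");
      return value;
    }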
4224 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4311 // Note that a potential implicit null check is handled in this
4353 // Potential implicit null checks, in the case of reference
4354 // fields, are handled in the previous switch statement.
4361 // Memory barriers, in the case of references, are also handled
4362 // in the previous switch statement.
4383 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4390 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4760 // Note that a potential implicit null check is handled in this
4807 // Potential implicit null checks, in the case of reference
4808 // arrays, are handled in the previous switch statement.
4922 // even in the case where a class object is in the from-space
4925 // false negative, in which case we would take the ArraySet
4936 // nor the object reference in `register_value->klass`, as
5065 // Mask out most significant bit in case the array is String's array of char.
5113 // Address the length field in the array.
5120 // the string compression flag) with the in-memory length and avoid the temporary.
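The length-handling matches above mask out the most significant bit, which carries the string compression flag. A sketch under exactly that assumption about the layout:

    #include <cstdint>

    constexpr uint32_t kLengthMask = 0x7FFFFFFFu;  // assumed: MSB is the flag

    static uint32_t StringLength(uint32_t in_memory_length) {
      return in_memory_length & kLengthMask;  // drop the compression bit
    }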
5176 // In suspend check slow path, usually there are no caller-save registers at all.
5178 // registers in full width (since the runtime only saves/restores lower part).
6162 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6202 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6459 // in the following move operation, as we will need it for the
6613 // In slow path based read barriers, the read barrier call is
6614 // inserted after the original load. However, in fast path based
6628 // Note: the original implementation in ReadBarrier::Barrier is
6830 // Is the value in range?
6834 // We are in the range of the table.
6835 // Load the address of the jump table in the constant area.
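The packed-switch matches above range-check the value and then jump through a table held in the constant area. A hypothetical lowering of that shape:

    #include <cstdint>

    using Handler = void (*)();

    static void Dispatch(int32_t value, int32_t lower_bound,
                         uint32_t num_entries, const Handler table[],
                         Handler default_handler) {
      uint32_t index = static_cast<uint32_t>(value - lower_bound);
      if (index >= num_entries) {  // Is the value in range?
        default_handler();         // no: take the default case
        return;
      }
      table[index]();              // yes: indirect jump through the table
    }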
6908 // Value won't fit in an int.
6982 // Patch in the right value.
6986 // Location in constant area that the fixup refers to.
6991 * Class to handle late fixup of offsets to a jump table that will be created in the
7107 // Didn't fit in a register. Do it in pieces.