
Lines Matching refs:HeapNumber

592   STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
593 __ and_(exponent, source_, Operand(HeapNumber::kSignMask), SetCC);
605 HeapNumber::kExponentBias << HeapNumber::kExponentShift;
615 // divide the constant 31 + HeapNumber::kExponentBias, 0x41d, into two parts
618 __ rsb(mantissa, zeros_, Operand(31 + HeapNumber::kExponentBias - fudge));
622 Operand(mantissa, LSL, HeapNumber::kExponentShift));
628 __ mov(mantissa, Operand(source_, LSL, HeapNumber::kMantissaBitsInTopWord));
632 Operand(source_, LSR, 32 - HeapNumber::kMantissaBitsInTopWord));
667 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
673 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
676 __ orr(scratch_, scratch_, Operand(HeapNumber::kSignMask), LeaveCC, cs);
683 ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
684 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
687 HeapNumber::kExponentOffset));
690 HeapNumber::kMantissaOffset));
698 non_smi_exponent += 1 << HeapNumber::kExponentShift;
699 __ mov(ip, Operand(HeapNumber::kSignMask | non_smi_exponent));
700 __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kExponentOffset));
702 __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset));
774 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
776 __ Sbfx(r3, r2, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
782 __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord));
784 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
839 // Load the double from rhs, tagged HeapNumber r0, to d6.
841 __ vldr(d6, r7, HeapNumber::kValueOffset);
865 // Load the double from lhs, tagged HeapNumber r1, to d7.
867 __ vldr(d7, r7, HeapNumber::kValueOffset);
936 __ vldr(d6, r7, HeapNumber::kValueOffset);
938 __ vldr(d7, r7, HeapNumber::kValueOffset);
1025 Operand(HeapNumber::kValueOffset - kHeapObjectTag));
1041 __ vldr(d0, scratch2, HeapNumber::kValueOffset);
1043 __ vldr(d1, probe, HeapNumber::kValueOffset);
1139 // be strictly equal if the other is a HeapNumber.
1329 __ vstr(d0, FieldMemOperand(heap_number_result, HeapNumber::kValueOffset));
1332 FieldMemOperand(heap_number_result, HeapNumber::kValueOffset));
1669 __ vstr(d5, r0, HeapNumber::kValueOffset);
1767 __ vstr(d0, r3, HeapNumber::kValueOffset);
1964 __ tst(scratch2, Operand(HeapNumber::kSignMask));
1984 __ vstr(d5, r0, HeapNumber::kValueOffset);
2108 __ vstr(double_scratch, r0, HeapNumber::kValueOffset);
2316 // Check if input is a HeapNumber.
2322 // Input is a HeapNumber. Load it to a double register and store the
2324 __ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset));
2390 __ vldr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset));
2419 __ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset));
2428 __ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
2434 __ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
2449 // Allocate an aligned object larger than a HeapNumber.
2450 ASSERT(4 * kPointerSize >= HeapNumber::kSize);
2525 const Register heapnumber = r0;
2550 __ vldr(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
2564 FieldMemOperand(exponent, HeapNumber::kValueOffset));
2570 FieldMemOperand(exponent, HeapNumber::kValueOffset));
2693 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
2695 FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
2696 ASSERT(heapnumber.is(r0));
5896 __ vldr(d1, r2, HeapNumber::kValueOffset);
5906 __ vldr(d0, r2, HeapNumber::kValueOffset);
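All of the matches above come from V8's ARM code stubs, which build and take apart the raw IEEE-754 bit pattern held in a HeapNumber's double field (the exponent word at HeapNumber::kExponentOffset, the low mantissa word at HeapNumber::kMantissaOffset). The sketch below is not V8 source; it is a minimal standalone C++ illustration of the integer-to-double packing that lines 592-632 perform with and_/rsb/orr and shifts. The constant values are assumptions based on the standard 64-bit double layout (11 exponent bits starting at bit 20 of the high word, bias 1023); only kSignMask is confirmed by the STATIC_ASSERT at line 592. The helper name PackInt and the use of __builtin_clz (a GCC/Clang builtin standing in for the ARM CLZ instruction the stub relies on) are illustrative choices, not part of the original file.

// Minimal sketch, not V8 source: names mirror the HeapNumber constants
// referenced above; the numeric values are assumptions from the IEEE-754
// double layout (kSignMask matches the STATIC_ASSERT at line 592).
#include <cstdint>
#include <cstdio>
#include <cstring>

namespace heapnumber_sketch {

const uint32_t kSignMask                 = 0x80000000u;  // sign bit of the high word
const int      kExponentShift            = 20;           // exponent field starts at bit 20
const int      kExponentBias             = 1023;         // IEEE-754 double exponent bias
const int      kMantissaBitsInTopWord    = 20;           // 52 mantissa bits - 32 in the low word
const int      kNonMantissaBitsInTopWord = 12;           // sign bit + 11 exponent bits

// Pack a positive, non-zero 32-bit integer into the exponent (high) and
// mantissa (low) words of a double, mirroring the shift/or sequence the
// conversion stub performs in lines 592-632 (sign handling omitted).
void PackInt(uint32_t value, uint32_t* exponent_word, uint32_t* mantissa_word) {
  int zeros = __builtin_clz(value);           // stand-in for the ARM CLZ the stub uses
  int exponent = 31 - zeros + kExponentBias;  // unbiased exponent is the top set bit's index
  // Shift the implicit leading 1 out; what remains is the fraction, left-aligned.
  uint32_t fraction = (uint32_t)((uint64_t)value << (zeros + 1));
  *exponent_word = ((uint32_t)exponent << kExponentShift) |
                   (fraction >> kNonMantissaBitsInTopWord);
  *mantissa_word = fraction << kMantissaBitsInTopWord;
}

}  // namespace heapnumber_sketch

int main() {
  uint32_t hi = 0, lo = 0;
  heapnumber_sketch::PackInt(5, &hi, &lo);
  uint64_t bits = ((uint64_t)hi << 32) | lo;
  double d;
  std::memcpy(&d, &bits, sizeof d);
  std::printf("5 -> %.1f (exponent word 0x%08x, mantissa word 0x%08x)\n", d, hi, lo);
  return 0;
}

Running the sketch packs 5 into exponent word 0x40140000 and mantissa word 0x00000000, the bit pattern of 5.0, which is the same pair of words the stub stores through HeapNumber::kExponentOffset and HeapNumber::kMantissaOffset.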