
Lines Matching refs:HeapNumber
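
Most matches below slice up the IEEE-754 double stored in a HeapNumber. For orientation, here is a minimal C++ sketch of the constants involved; the values hold for V8 of this era, but treat them as assumptions rather than the canonical definitions:

#include <cstdint>

// The "top word" of a double, as the matches below slice it up:
//   bit 31      sign            (kSignMask == 0x80000000u, see line 472)
//   bits 30..20 biased exponent (kExponentBits == 11, kExponentShift == 20)
//   bits 19..0  high mantissa   (kMantissaBitsInTopWord == 20)
constexpr uint32_t kSignMask = 0x80000000u;
constexpr int kExponentShift = 20;
constexpr int kExponentBits = 11;
constexpr int kExponentBias = 1023;
constexpr int kMantissaBitsInTopWord = 20;
constexpr int kNonMantissaBitsInTopWord = 32 - kMantissaBitsInTopWord;  // 12
constexpr uint32_t kExponentMask =
    ((1u << kExponentBits) - 1) << kExponentShift;  // 0x7ff00000
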

472   STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
473 __ And(exponent, source_, Operand(HeapNumber::kSignMask));
485 HeapNumber::kExponentBias << HeapNumber::kExponentShift;
499 __ li(mantissa, Operand(31 + HeapNumber::kExponentBias));
501 __ sll(mantissa, mantissa, HeapNumber::kExponentShift);
509 __ sll(mantissa, source_, HeapNumber::kMantissaBitsInTopWord);
511 __ srl(source_, source_, 32 - HeapNumber::kMantissaBitsInTopWord);
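
Lines 472-511 come from ConvertToDoubleStub. Below is a hedged C++ sketch of the word-building they perform; IntToDoubleWords is a hypothetical helper, and the normalize-then-split sequence is illustrative rather than V8's exact instruction order:

#include <cstdint>

// Build the hi/lo words of an IEEE-754 double from a nonzero int32 the way
// the stub does: extract the sign, normalize, then split the mantissa
// across the two words.
void IntToDoubleWords(int32_t value, uint32_t* hi, uint32_t* lo) {
  uint32_t sign = value < 0 ? kSignMask : 0u;
  uint32_t mag = sign ? 0u - static_cast<uint32_t>(value)
                      : static_cast<uint32_t>(value);
  int zeros = __builtin_clz(mag);              // GCC/Clang builtin; mag != 0 assumed
  uint32_t exponent = (31 - zeros) + kExponentBias;
  mag <<= zeros;                               // leading 1 now at bit 31...
  mag <<= 1;                                   // ...then dropped (it is implicit)
  *hi = sign | (exponent << kExponentShift) | (mag >> kNonMantissaBitsInTopWord);
  *lo = mag << kMantissaBitsInTopWord;         // cf. the sll at line 509
}
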
595 // Load the double from tagged HeapNumber to double register.
597 // ARM uses a workaround here because of the unaligned HeapNumber
600 __ ldc1(dst, FieldMemOperand(object, HeapNumber::kValueOffset));
604 __ lw(dst1, FieldMemOperand(object, HeapNumber::kValueOffset));
606 HeapNumber::kValueOffset + kPointerSize));
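
The loads at lines 595-606 go through FieldMemOperand, which folds V8's heap-object tag (the low bit set on tagged pointers) into the displacement. A hypothetical C++ equivalent of that addressing; memcpy stands in for the unaligned-access workaround the ARM comment refers to:

#include <cstdint>
#include <cstring>

constexpr uintptr_t kHeapObjectTag = 1;  // heap pointers carry a low tag bit

// What the ldc1 at line 600 (or the lw pair at 604-606) reads: the double
// stored value_offset bytes into the untagged object (hypothetical helper).
double LoadHeapNumberValue(uintptr_t tagged_object, int value_offset) {
  const char* field = reinterpret_cast<const char*>(
      tagged_object - kHeapObjectTag + value_offset);
  double result;
  std::memcpy(&result, field, sizeof result);  // memcpy tolerates misalignment
  return result;
}
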
653 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset));
664 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
665 __ lw(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset));
709 __ And(dst2, int_scratch, Operand(HeapNumber::kSignMask));
724 __ Addu(scratch2, dst1, Operand(HeapNumber::kExponentBias));
726 HeapNumber::kExponentShift, HeapNumber::kExponentBits);
736 __ Subu(scratch2, dst1, Operand(HeapNumber::kMantissaBitsInTopWord));
747 __ li(at, HeapNumber::kMantissaBitsInTopWord);
795 __ ldc1(double_dst, FieldMemOperand(object, HeapNumber::kValueOffset));
815 __ lw(dst2, FieldMemOperand(object, HeapNumber::kExponentOffset));
816 __ lw(dst1, FieldMemOperand(object, HeapNumber::kMantissaOffset));
819 __ And(scratch1, dst1, Operand(~HeapNumber::kSignMask));
828 __ lw(dst2, FieldMemOperand(object, HeapNumber::kExponentOffset));
829 __ lw(dst1, FieldMemOperand(object, HeapNumber::kMantissaOffset));
867 __ ldc1(double_scratch, FieldMemOperand(object, HeapNumber::kValueOffset));
885 __ lw(scratch2, FieldMemOperand(object, HeapNumber::kExponentOffset));
886 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMantissaOffset));
889 __ And(dst, scratch1, Operand(~HeapNumber::kSignMask));
907 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
908 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
928 HeapNumber::kExponentShift,
929 HeapNumber::kExponentBits);
932 __ Subu(scratch, scratch, Operand(HeapNumber::kExponentBias));
963 HeapNumber::kMantissaBitsInTopWord,
964 32 - HeapNumber::kMantissaBitsInTopWord);
965 __ sll(at, src1, HeapNumber::kNonMantissaBitsInTopWord);
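
Lines 928-965 are the exactness test behind a double-to-int32 conversion: extract the biased exponent (Ext at 928-929), re-bias it (932), then require that no mantissa bits fall below the integer boundary (963-965). A bit-level sketch using the constants above; DoubleIsInt32 is a hypothetical helper and, unlike the real code, simply rejects the most negative int32:

#include <cstdint>
#include <cstring>

// Does |d| convert to int32 without loss? Mirrors the exponent/mantissa
// test above; conservatively rejects -2^31, which V8 handles separately.
bool DoubleIsInt32(double d, int32_t* result) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  int exponent = static_cast<int>((bits >> 52) & 0x7FF) - kExponentBias;
  if (exponent < 0) {                      // |d| < 1: only +/-0 qualifies
    *result = 0;
    return d == 0.0;
  }
  if (exponent > 30) return false;         // too big for this sketch
  uint64_t mantissa = (bits & ((1ull << 52) - 1)) | (1ull << 52);  // implicit 1
  if (mantissa & ((1ull << (52 - exponent)) - 1)) return false;    // fraction bits set
  int32_t magnitude = static_cast<int32_t>(mantissa >> (52 - exponent));
  *result = (bits >> 63) ? -magnitude : magnitude;
  return true;
}
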
1016 __ sdc1(f0, FieldMemOperand(heap_number_result, HeapNumber::kValueOffset));
1019 __ sw(v1, FieldMemOperand(heap_number_result, HeapNumber::kExponentOffset));
1020 __ sw(v0, FieldMemOperand(heap_number_result, HeapNumber::kMantissaOffset));
1063 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
1071 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
1082 ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
1083 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
1087 HeapNumber::kExponentOffset));
1090 HeapNumber::kMantissaOffset));
1098 non_smi_exponent += 1 << HeapNumber::kExponentShift;
1099 __ li(scratch_, Operand(HeapNumber::kSignMask | non_smi_exponent));
1101 FieldMemOperand(the_heap_number_, HeapNumber::kExponentOffset));
1104 FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset));
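
Lines 1063-1104 belong to the stub that writes a non-Smi int32 into a HeapNumber. Because such a value always has its leading 1 at bit 30, the exponent is the constant kExponentBias + 30 (line 1071) and a fixed shift (line 1083) drops the remaining bits into place. A sketch under that precondition; NonSmiIntToDoubleWords is hypothetical, and the most negative int32 takes a separate path in the real code:

#include <cstdint>

// Precondition: 2^30 <= |value| < 2^31 (an int32 outside the Smi range).
void NonSmiIntToDoubleWords(int32_t value, uint32_t* hi, uint32_t* lo) {
  uint32_t sign = static_cast<uint32_t>(value) & kSignMask;
  uint32_t mag = sign ? 0u - static_cast<uint32_t>(value)
                      : static_cast<uint32_t>(value);
  const int shift_distance = kNonMantissaBitsInTopWord - 2;  // 10, as at line 1083
  mag &= ~(1u << 30);                        // drop the implicit leading 1
  *hi = sign | ((kExponentBias + 30u) << kExponentShift)     // cf. line 1071
             | (mag >> shift_distance);
  *lo = mag << (32 - shift_distance);
}
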
1125 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask));
1183 __ lw(t2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
1190 __ sll(t2, t2, HeapNumber::kNonMantissaBitsInTopWord);
1192 __ lw(t3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
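
Lines 1125-1192 (and again at 1330-1345 below) test for NaN: every exponent bit set and at least one mantissa bit nonzero. The sll by kNonMantissaBitsInTopWord discards sign and exponent so only mantissa bits remain. In C++ terms, with hi/lo being the two words the surrounding code loads (IsNaNFromWords is a hypothetical helper):

#include <cstdint>

// A double is NaN iff the exponent field is all ones and the 52-bit
// mantissa is nonzero; shifting by kNonMantissaBitsInTopWord isolates
// the top mantissa bits, as at lines 1190, 1334, and 1345.
bool IsNaNFromWords(uint32_t hi, uint32_t lo) {
  if ((hi & kExponentMask) != kExponentMask) return false;
  uint32_t top_mantissa = hi << kNonMantissaBitsInTopWord;
  return (top_mantissa | lo) != 0;
}
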
1248 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1251 __ lw(a3, FieldMemOperand(lhs, HeapNumber::kValueOffset + 4));
1252 __ lw(a2, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1287 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1297 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4));
1298 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1300 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1301 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4));
1309 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
1330 __ li(exp_mask_reg, HeapNumber::kExponentMask);
1334 __ sll(t5, lhs_exponent, HeapNumber::kNonMantissaBitsInTopWord);
1339 __ li(exp_mask_reg, HeapNumber::kExponentMask);
1345 __ sll(t5, rhs_exponent, HeapNumber::kNonMantissaBitsInTopWord);
1376 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
1510 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1511 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1513 __ lw(a2, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1514 __ lw(a3, FieldMemOperand(lhs, HeapNumber::kValueOffset + 4));
1516 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4));
1517 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1519 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1520 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4));
1614 Operand(HeapNumber::kValueOffset - kHeapObjectTag));
1629 __ ldc1(f12, FieldMemOperand(object, HeapNumber::kValueOffset));
1630 __ ldc1(f14, FieldMemOperand(probe, HeapNumber::kValueOffset));
1716 // be strictly equal if the other is a HeapNumber.
1928 __ ldc1(f2, FieldMemOperand(tos_, HeapNumber::kValueOffset));
2151 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2152 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign.
2153 __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2169 __ lw(a3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
2170 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2171 __ sw(a3, FieldMemOperand(a1, HeapNumber::kMantissaOffset));
2172 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign.
2173 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset));
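
Lines 2151-2153 negate a HeapNumber in place, and 2169-2173 negate while copying into a fresh one; both just XOR the sign bit of the exponent word, no FPU needed. The same trick on a whole double, as a sketch (FlipSign is a hypothetical helper):

#include <cstdint>
#include <cstring>

// Negate without any FPU arithmetic: flip bit 63, the 64-bit analogue of
// XOR-ing the exponent word with kSignMask (lines 2152, 2172).
double FlipSign(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  bits ^= 0x8000000000000000ull;
  std::memcpy(&d, &bits, sizeof d);
  return d;
}
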
2233 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2604 // ARM uses a workaround here because of the unaligned HeapNumber
2607 __ sdc1(f10, FieldMemOperand(result, HeapNumber::kValueOffset));
2724 // ARM uses a workaround here because of the unaligned HeapNumber
2727 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2951 __ And(scratch2, scratch2, HeapNumber::kSignMask);
2976 __ sdc1(f10, FieldMemOperand(v0, HeapNumber::kValueOffset));
3128 __ sdc1(double_scratch, FieldMemOperand(v0, HeapNumber::kValueOffset));
3363 // Check if input is a HeapNumber.
3369 // Input is a HeapNumber. Store the
3371 __ lw(a2, FieldMemOperand(a0, HeapNumber::kValueOffset));
3372 __ lw(a3, FieldMemOperand(a0, HeapNumber::kValueOffset + 4));
3440 __ ldc1(f4, FieldMemOperand(t2, HeapNumber::kValueOffset));
3475 __ sdc1(f4, FieldMemOperand(t2, HeapNumber::kValueOffset));
3489 __ sdc1(f4, FieldMemOperand(a0, HeapNumber::kValueOffset));
3495 __ ldc1(f4, FieldMemOperand(v0, HeapNumber::kValueOffset));
3510 // Allocate an aligned object larger than a HeapNumber.
3511 ASSERT(4 * kPointerSize >= HeapNumber::kSize);
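
The assert at line 3511 holds because, on a 32-bit target, a HeapNumber is one map word plus an 8-byte payload (a layout assumption here), so four pointer-sized slots leave slack for realignment:

// 4 * kPointerSize (16 bytes on mips32) >= HeapNumber::kSize (4 + 8 = 12),
// so the over-allocation leaves room to 8-byte-align the stored double.
static_assert(4 * sizeof(void*) >= 12, "aligned HeapNumber fits");
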
3589 const Register heapnumber = v0;
3613 __ ldc1(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
3626 FieldMemOperand(exponent, HeapNumber::kValueOffset));
3632 FieldMemOperand(exponent, HeapNumber::kValueOffset));
3774 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
3776 FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
3777 ASSERT(heapnumber.is(v0));
6804 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset));
6806 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset));