
Lines Matching refs:__

39 #define __ ACCESS_MASM(masm)
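For context: in V8, ACCESS_MASM(masm) expands to masm->, so every "__"-prefixed line in this listing is a method call on the current MacroAssembler. A minimal illustration of the convention (EmitExample is a made-up name; the assembler calls are of the kind used throughout this file):

    #define __ ACCESS_MASM(masm)

    static void EmitExample(MacroAssembler* masm) {
      __ lw(t0, MemOperand(sp, 0));  // expands to masm->lw(t0, MemOperand(sp, 0));
      __ Ret();                      // expands to masm->Ret();
    }

    #undef __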
74 __ sll(offset_scratch, offset, 1);
75 __ Addu(offset_scratch, offset_scratch, offset);
78 __ li(base_addr, Operand(key_offset));
79 __ sll(at, offset_scratch, kPointerSizeLog2);
80 __ Addu(base_addr, base_addr, at);
83 __ lw(at, MemOperand(base_addr, 0));
84 __ Branch(&miss, ne, name, Operand(at));
87 __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
88 __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
89 __ Branch(&miss, ne, at, Operand(scratch2));
94 __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));
99 __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
100 __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
101 __ Branch(&miss, ne, flags_reg, Operand(flags));
105 __ jmp(&miss);
107 __ jmp(&miss);
112 __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
113 __ Jump(at);
116 __ bind(&miss);
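Lines 74-116 are the stub-cache probe. Each cache entry holds three words (key, value, map), so the probed offset is scaled by 3 (a shift-left-by-1 plus an add) and then by the pointer size; the emitted code compares the cached name, the receiver's map, and the lookup-relevant code flags, and on a hit jumps past the Code header into the cached stub. A hedged C++ sketch of the same logic (the Entry layout is inferred from the map_off_addr/value_off_addr arithmetic above; the flags() accessor is illustrative):

    struct Entry { String* key; Code* value; Map* map; };  // 3 words per entry

    static Code* Probe(Entry* table, uint32_t offset, String* name,
                       Map* receiver_map, Code::Flags flags) {
      Entry* entry = &table[offset];                   // base + 3 * offset words
      if (entry->key != name) return NULL;             // name check (line 84)
      if (entry->map != receiver_map) return NULL;     // map check (line 89)
      Code* code = entry->value;
      if ((code->flags() & ~Code::kFlagsNotUsedInLookup) != flags)
        return NULL;                                   // flags check (line 101)
      return code;  // caller jumps to code + Code::kHeaderSize - kHeapObjectTag
    }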
133 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
134 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
143 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
144 __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
145 __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
146 __ Branch(miss_label, ne, scratch0, Operand(zero_reg));
149 __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
150 __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
154 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
156 __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
158 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
159 __ Branch(miss_label, ne, map, Operand(tmp));
162 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
172 __ bind(&done);
173 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
213 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
217 __ JumpIfSmi(receiver, &miss);
220 __ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
221 __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
222 __ Addu(scratch, scratch, at);
226 __ srl(scratch, scratch, kHeapObjectTagSize);
227 __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
228 __ And(scratch, scratch, Operand(mask));
243 __ srl(at, name, kHeapObjectTagSize);
244 __ Subu(scratch, scratch, at);
246 __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
247 __ And(scratch, scratch, Operand(mask2));
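Lines 220-228 compute the primary stub-cache table offset and lines 243-247 derive the secondary one from it. Rendered as C++ (hash_field, map_bits, and name_bits stand for the raw words loaded into the registers above; mask and mask2 are the table-size masks from the surrounding code):

    // Primary offset: mix the name's hash with the receiver's map pointer,
    // fold in the code flags, and mask to the primary table size.
    uint32_t primary = (((hash_field + map_bits) >> kHeapObjectTagSize)
                        ^ ((flags >> kHeapObjectTagSize) & mask)) & mask;

    // Secondary offset, computed from the primary offset after a miss.
    uint32_t secondary = ((primary - (name_bits >> kHeapObjectTagSize))
                          + ((flags >> kHeapObjectTagSize) & mask2)) & mask2;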
263 __ bind(&miss);
264 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
273 __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
275 __ lw(prototype,
278 __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
280 __ lw(prototype,
283 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
294 __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
296 __ li(at, isolate->global());
297 __ Branch(miss, ne, prototype, Operand(at));
302 __ li(prototype, Handle<Map>(function->initial_map()));
304 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
321 __ lw(dst, FieldMemOperand(src, offset));
325 __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
326 __ lw(dst, FieldMemOperand(dst, offset));
336 __ JumpIfSmi(receiver, miss_label);
339 __ GetObjectType(receiver, scratch, scratch);
340 __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));
343 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
344 __ Ret();
358 __ JumpIfSmi(receiver, smi, t0);
361 __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
362 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
363 __ And(scratch2, scratch1, Operand(kIsNotStringMask));
365 __ Branch(non_string_object,
390 __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
391 __ Ret();
395 __ bind(&check_wrapper);
396 __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));
399 __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
401 __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
402 __ Ret();
412 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
413 __ mov(v0, scratch1);
414 __ Ret();
434 __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
439 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
450 __ push(receiver_reg);
451 __ li(a2, Operand(transition));
452 __ Push(a2, a0);
453 __ TailCallExternalReference(
463 __ li(t0, Operand(transition));
464 __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
475 __ sw(a0, FieldMemOperand(receiver_reg, offset));
478 __ JumpIfSmi(a0, &exit, scratch);
482 __ mov(name_reg, a0);
483 __ RecordWriteField(receiver_reg,
493 __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
494 __ sw(a0, FieldMemOperand(scratch, offset));
497 __ JumpIfSmi(a0, &exit);
501 __ mov(name_reg, a0);
502 __ RecordWriteField(scratch,
511 __ bind(&exit);
512 __ mov(v0, a0);
513 __ Ret();
522 __ Jump(code, RelocInfo::CODE_TARGET);
536 __ JumpIfSmi(a1, miss);
537 __ GetObjectType(a1, a3, a3);
538 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
543 __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
544 __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
551 __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
560 __ push(name);
564 __ li(scratch, Operand(interceptor));
565 __ Push(scratch, receiver, holder);
566 __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
567 __ push(scratch);
582 __ PrepareCEntryArgs(5);
583 __ PrepareCEntryFunction(ref);
586 __ CallStub(&stub);
601 __ push(zero_reg);
608 __ Drop(kFastApiCallArguments);
626 __ LoadHeapObject(t1, function);
627 __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
633 __ li(a0, api_call_info);
634 __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
636 __ li(t2, call_data);
640 __ sw(t1, MemOperand(sp, 1 * kPointerSize));
641 __ sw(t2, MemOperand(sp, 2 * kPointerSize));
645 __ Addu(a2, sp, Operand(2 * kPointerSize));
650 __ EnterExitFrame(false, kApiStackSpace);
659 __ Addu(a1, sp, kPointerSize);
662 __ sw(a2, MemOperand(a1, 0 * kPointerSize));
664 __ Addu(t0, a2, Operand(argc * kPointerSize));
665 __ sw(t0, MemOperand(a1, 1 * kPointerSize));
667 __ li(t0, Operand(argc));
668 __ sw(t0, MemOperand(a1, 2 * kPointerSize));
670 __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize));
680 __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
708 __ JumpIfSmi(receiver, miss);
749 __ IncrementCounter(counters->call_const_interceptor(), 1,
753 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
798 __ InvokeFunction(optimization.constant_function(), arguments_,
804 __ bind(&miss_cleanup);
806 __ Branch(miss_label);
810 __ bind(&regular_invoke);
833 __ push(name_);
837 __ CallExternalReference(
843 __ pop(name_);
856 __ Push(holder, name_);
862 __ pop(name_); // Restore the name.
863 __ pop(receiver); // Restore the holder.
866 __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
867 __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
889 __ li(scratch, Operand(cell));
890 __ lw(scratch,
892 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
893 __ Branch(miss, ne, scratch, Operand(at));
931 __ mtc1(ival, f0);
932 __ cvt_s_w(f0, f0);
933 __ sll(scratch1, wordoffset, 2);
934 __ addu(scratch1, dst, scratch1);
935 __ swc1(f0, MemOperand(scratch1, 0));
945 __ And(fval, ival, Operand(kBinary32SignMask));
947 __ subu(scratch1, zero_reg, ival);
948 __ Movn(ival, scratch1, fval);
953 __ Branch(&not_special, gt, ival, Operand(1));
959 __ Xor(scratch1, ival, Operand(1));
960 __ li(scratch2, exponent_word_for_1);
961 __ or_(scratch2, fval, scratch2);
962 __ Movz(fval, scratch2, scratch1); // Only if ival is equal to 1.
963 __ Branch(&done);
965 __ bind(&not_special);
969 __ Clz(zeros, ival);
972 __ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias);
973 __ subu(scratch1, scratch1, zeros);
975 __ sll(scratch1, scratch1, kBinary32ExponentShift);
976 __ or_(fval, fval, scratch1);
979 __ Addu(zeros, zeros, Operand(1));
981 __ sllv(ival, ival, zeros);
983 __ srl(scratch1, ival, kBitsPerInt - kBinary32MantissaBits);
984 __ or_(fval, fval, scratch1);
986 __ bind(&done);
988 __ sll(scratch1, wordoffset, 2);
989 __ addu(scratch1, dst, scratch1);
990 __ sw(fval, MemOperand(scratch1, 0));
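Lines 945-990 build IEEE-754 binary32 bits from a signed 32-bit integer by hand (the no-FPU path of StoreIntAsFloat). A hedged C++ rendering of the same flow, using the constants named in the listing (CountLeadingZeros stands in for the Clz instruction; exponent_word_for_1 is the exponent field of 1.0f, as in the surrounding source):

    uint32_t IntToFloat32Bits(int32_t ival) {
      uint32_t fval = ival & kBinary32SignMask;        // keep only the sign bit
      uint32_t mag = (ival < 0) ? -static_cast<uint32_t>(ival)
                                : static_cast<uint32_t>(ival);
      if (mag <= 1) {                                  // 0 and +/-1 are special
        if (mag == 1) fval |= exponent_word_for_1;     // 1.0f: sign | exponent
        return fval;                                   // 0.0f: sign bit only
      }
      int zeros = CountLeadingZeros(mag);              // Clz (line 969)
      uint32_t exponent = (kBitsPerInt - 1) + kBinary32ExponentBias - zeros;
      fval |= exponent << kBinary32ExponentShift;
      mag <<= zeros + 1;                               // drop the implicit leading 1
      fval |= mag >> (kBitsPerInt - kBinary32MantissaBits);  // truncate, no rounding
      return fval;
    }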
1014 __ li(scratch, biased_exponent << HeapNumber::kExponentShift);
1016 __ sll(loword, hiword, mantissa_shift_for_lo_word);
1017 __ srl(hiword, hiword, mantissa_shift_for_hi_word);
1018 __ or_(hiword, scratch, hiword);
1020 __ mov(loword, zero_reg);
1021 __ sll(hiword, hiword, mantissa_shift_for_hi_word);
1022 __ or_(hiword, scratch, hiword);
1028 __ li(scratch, 1 << HeapNumber::kExponentShift);
1029 __ nor(scratch, scratch, scratch);
1030 __ and_(hiword, hiword, scratch);
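Lines 1014-1030 (GenerateUInt2Double) place an unsigned integer with a known number of leading zeros into the hi/lo words of a binary64. Equivalent C++, slightly reordered so the implicit leading 1 is cleared before the exponent is OR-ed in; the emitted code instead fixes the overlapping low exponent bit afterwards, with the same effect:

    void UInt2Double(uint32_t value, int leading_zeroes,
                     uint32_t* hiword, uint32_t* loword) {
      int meaningful_bits = (kBitsPerInt - 1) - leading_zeroes;
      uint32_t biased = meaningful_bits + HeapNumber::kExponentBias;
      // 20 mantissa bits live in the hi word; the rest spill into the lo word.
      int shift_hi = meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
      if (shift_hi > 0) {
        *loword = value << (kBitsPerInt - shift_hi);
        *hiword = value >> shift_hi;
      } else {
        *loword = 0;
        *hiword = value << -shift_hi;
      }
      *hiword &= ~(1u << HeapNumber::kMantissaBitsInTopWord);  // drop implicit 1
      *hiword |= biased << HeapNumber::kExponentShift;
    }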
1035 #undef __
1036 #define __ ACCESS_MASM(masm())
1058 __ sw(reg, MemOperand(sp));
1084 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1086 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1089 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
1095 __ CheckAccessGlobalProxy(reg, scratch2, miss);
1102 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1105 __ li(reg, Operand(prototype));
1110 __ sw(reg, MemOperand(sp));
1121 __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
1127 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1150 __ JumpIfSmi(receiver, miss);
1156 __ Ret();
1170 __ JumpIfSmi(receiver, miss, scratch1);
1177 __ LoadHeapObject(v0, value);
1178 __ Ret();
1193 __ JumpIfSmi(receiver, miss, scratch1);
1201 __ push(receiver);
1202 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_
1204 __ li(scratch3, callback);
1205 __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
1207 __ li(scratch3, Handle<Object>(callback->data()));
1209 __ Push(reg, scratch3, name_reg);
1210 __ mov(a2, scratch2); // Saved in case scratch2 == a1.
1211 __ mov(a1, sp); // a1 (first argument - see note below) = Handle<String>
1220 __ EnterExitFrame(false, kApiStackSpace);
1224 __ sw(a2, MemOperand(sp, kPointerSize));
1226 __ Addu(a2, sp, kPointerSize);
1235 __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
1253 __ JumpIfSmi(receiver, miss);
1284 __ Push(receiver, holder_reg, name_reg);
1286 __ Push(holder_reg, name_reg);
1299 __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1300 __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
1302 __ Ret();
1304 __ bind(&interceptor_failed);
1305 __ pop(name_reg);
1306 __ pop(holder_reg);
1308 __ pop(receiver);
1331 __ Ret();
1343 __ li(scratch2, callback);
1347 __ Push(receiver, holder_reg);
1348 __ lw(scratch3,
1350 __ Push(scratch3, scratch2, name_reg);
1352 __ push(receiver);
1353 __ lw(scratch3,
1355 __ Push(holder_reg, scratch3, scratch2, name_reg);
1361 __ TailCallExternalReference(ref, 5, 1);
1374 __ TailCallExternalReference(ref, 5, 1);
1381 __ Branch(miss, ne, a2, Operand(name));
1396 __ lw(a0, MemOperand(sp, argc * kPointerSize));
1399 __ JumpIfSmi(a0, miss);
1409 __ li(a3, Operand(cell));
1410 __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
1419 __ JumpIfSmi(a1, miss);
1420 __ GetObjectType(a1, a3, a3);
1421 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
1424 __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1425 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1426 __ Branch(miss, ne, t0, Operand(a3));
1428 __ Branch(miss, ne, a1, Operand(function));
1438 __ Jump(code, RelocInfo::CODE_TARGET);
1457 __ lw(a0, MemOperand(sp, argc * kPointerSize));
1459 __ JumpIfSmi(a0, &miss, t0);
1468 __ bind(&miss);
1501 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1504 __ JumpIfSmi(receiver, &miss);
1512 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1513 __ Drop(argc + 1);
1514 __ Ret();
1523 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1526 __ CheckMap(elements,
1533 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1536 __ Addu(v0, v0, Operand(Smi::FromInt(argc)));
1539 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1542 __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
1546 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1547 __ JumpIfNotSmi(t0, &with_write_barrier);
1550 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1555 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1556 __ Addu(end_elements, elements, end_elements);
1559 __ Addu(end_elements, end_elements, kEndElementsOffset);
1560 __ sw(t0, MemOperand(end_elements));
1563 __ Drop(argc + 1);
1564 __ Ret();
1566 __ bind(&with_write_barrier);
1568 __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1572 __ CheckFastObjectElements(a3, t3, &not_fast_object);
1573 __ jmp(&fast_object);
1575 __ bind(&not_fast_object);
1576 __ CheckFastSmiOnlyElements(a3, t3, &call_builtin);
1579 __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
1584 __ mov(a2, receiver);
1586 __ bind(&fast_object);
1588 __ CheckFastObjectElements(a3, a3, &call_builtin);
1592 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1597 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1598 __ Addu(end_elements, elements, end_elements);
1599 __ Addu(end_elements, end_elements, kEndElementsOffset);
1600 __ sw(t0, MemOperand(end_elements));
1602 __ RecordWrite(elements,
1609 __ Drop(argc + 1);
1610 __ Ret();
1612 __ bind(&attempt_to_grow_elements);
1617 __ Branch(&call_builtin);
1620 __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize));
1624 __ JumpIfSmi(a2, &no_fast_elements_check);
1625 __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1626 __ CheckFastObjectElements(t3, t3, &call_builtin);
1627 __ bind(&no_fast_elements_check);
1638 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1639 __ Addu(end_elements, elements, end_elements);
1640 __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1641 __ li(t3, Operand(new_space_allocation_top));
1642 __ lw(a3, MemOperand(t3));
1643 __ Branch(&call_builtin, ne, end_elements, Operand(a3));
1645 __ li(t5, Operand(new_space_allocation_limit));
1646 __ lw(t5, MemOperand(t5));
1647 __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
1648 __ Branch(&call_builtin, hi, a3, Operand(t5));
1652 __ sw(a3, MemOperand(t3));
1654 __ sw(a2, MemOperand(end_elements));
1656 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
1658 __ sw(a3, MemOperand(end_elements, i * kPointerSize));
1662 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1663 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1664 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1667 __ Drop(argc + 1);
1668 __ Ret();
1670 __ bind(&call_builtin);
1671 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1678 __ bind(&miss);
1710 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1712 __ JumpIfSmi(receiver, &miss);
1719 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1722 __ CheckMap(elements,
1729 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1730 __ Subu(t0, t0, Operand(Smi::FromInt(1)));
1731 __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
1734 __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1739 __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
1740 __ Addu(elements, elements, t1);
1741 __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1742 __ Branch(&call_builtin, eq, v0, Operand(t2));
1745 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1748 __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1749 __ Drop(argc + 1);
1750 __ Ret();
1752 __ bind(&return_undefined);
1753 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1754 __ Drop(argc + 1);
1755 __ Ret();
1757 __ bind(&call_builtin);
1758 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1764 __ bind(&miss);
1816 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1818 __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1820 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1831 __ Drop(argc + 1);
1832 __ Ret();
1838 __ bind(&index_out_of_range);
1839 __ LoadRoot(v0, Heap::kNanValueRootIndex);
1840 __ Drop(argc + 1);
1841 __ Ret();
1844 __ bind(&miss);
1846 __ li(a2, name);
1847 __ bind(&name_miss);
1897 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1899 __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1901 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1913 __ Drop(argc + 1);
1914 __ Ret();
1920 __ bind(&index_out_of_range);
1921 __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
1922 __ Drop(argc + 1);
1923 __ Ret();
1926 __ bind(&miss);
1928 __ li(a2, name);
1929 __ bind(&name_miss);
1961 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1964 __ JumpIfSmi(a1, &miss);
1977 __ lw(code, MemOperand(sp, 0 * kPointerSize));
1982 __ JumpIfNotSmi(code, &slow);
1985 __ And(code, code, Operand(Smi::FromInt(0xffff)));
1989 __ Drop(argc + 1);
1990 __ Ret();
1997 __ bind(&slow);
1998 __ InvokeFunction(
2001 __ bind(&miss);
2038 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2040 __ JumpIfSmi(a1, &miss);
2051 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2055 __ And(t0, v0, Operand(kSmiTagMask));
2056 __ Drop(argc + 1, eq, t0, Operand(zero_reg));
2057 __ Ret(eq, t0, Operand(zero_reg));
2059 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2066 __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2069 __ cfc1(a3, FCSR);
2071 __ ctc1(zero_reg, FCSR);
2073 __ floor_w_d(f0, f0);
2077 __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
2078 __ And(t2, t1, Operand(~HeapNumber::kSignMask));
2079 __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
2082 __ cfc1(t5, FCSR);
2083 __ And(t5, t5, Operand(kFCSRExceptionFlagMask));
2084 __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));
2089 __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
2091 __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
2095 __ Branch(&restore_fcsr_and_return, ge, t3,
2097 __ Branch(&wont_fit_smi);
2099 __ bind(&no_fpu_error);
2101 __ mfc1(v0, f0);
2103 __ Addu(a1, v0, Operand(0x40000000));
2104 __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
2107 __ sll(v0, v0, kSmiTagSize);
2110 __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
2112 __ And(t0, t1, Operand(HeapNumber::kSignMask));
2115 __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
2116 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2118 __ bind(&restore_fcsr_and_return);
2120 __ ctc1(a3, FCSR);
2122 __ Drop(argc + 1);
2123 __ Ret();
2125 __ bind(&wont_fit_smi);
2127 __ ctc1(a3, FCSR);
2129 __ bind(&slow);
2132 __ InvokeFunction(
2135 __ bind(&miss);
2167 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2169 __ JumpIfSmi(a1, &miss);
2180 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2185 __ JumpIfNotSmi(v0, &not_smi);
2189 __ sra(t0, v0, kBitsPerInt - 1);
2190 __ Xor(a1, v0, t0);
2193 __ Subu(v0, a1, t0);
2198 __ Branch(&slow, lt, v0, Operand(zero_reg));
2201 __ Drop(argc + 1);
2202 __ Ret();
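Lines 2189-2193 are the classic branchless absolute value: an arithmetic shift by 31 yields 0 for non-negative input and -1 (all ones) for negative input, and (v ^ mask) - mask is the identity when mask is 0 and -v when mask is -1. In C++:

    int32_t mask = v >> 31;            // sra: 0 if v >= 0, -1 otherwise
    int32_t abs_v = (v ^ mask) - mask; // conditional negate, no branch

Note that INT_MIN maps to itself and stays negative, which is exactly why line 2198 falls through to the slow path when the result is still below zero.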
2206 __ bind(&not_smi);
2207 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2208 __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2213 __ And(t0, a1, Operand(HeapNumber::kSignMask));
2214 __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
2215 __ Drop(argc + 1);
2216 __ Ret();
2220 __ bind(&negative_sign);
2221 __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
2222 __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2223 __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
2224 __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
2225 __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2226 __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2227 __ Drop(argc + 1);
2228 __ Ret();
2232 __ bind(&slow);
2233 __ InvokeFunction(
2236 __ bind(&miss);
2271 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2274 __ JumpIfSmi(a1, &miss_before_stack_reserved);
2276 __ IncrementCounter(counters->call_const(), 1, a0, a3);
2277 __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
2287 __ bind(&miss);
2290 __ bind(&miss_before_stack_reserved);
2321 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2325 __ JumpIfSmi(a1, &miss);
2333 __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2343 __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2344 __ sw(a3, MemOperand(sp, argc * kPointerSize));
2351 __ GetObjectType(a1, a3, a3);
2352 __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2362 __ jmp(&miss);
2370 __ JumpIfSmi(a1, &fast);
2371 __ GetObjectType(a1, a0, a0);
2372 __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2373 __ bind(&fast);
2383 __ jmp(&miss);
2391 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2392 __ Branch(&fast, eq, a1, Operand(t0));
2393 __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2394 __ Branch(&miss, ne, a1, Operand(t0));
2395 __ bind(&fast);
2405 __ jmp(&miss);
2413 __ InvokeFunction(
2417 __ bind(&miss);
2444 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2451 __ mov(a1, v0);
2453 __ lw(a0, MemOperand(sp, argc * kPointerSize));
2458 __ bind(&miss);
2494 __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
2495 __ sw(a3, MemOperand(sp, argc * kPointerSize));
2499 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2503 __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
2511 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
2512 __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION,
2516 __ bind(&miss);
2517 __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
2539 __ bind(&miss);
2540 __ li(a2, Operand(Handle<String>(name))); // Restore name.
2542 __ Jump(ic, RelocInfo::CODE_TARGET);
2562 __ CheckMap(a1, a3, Handle<Map>(object->map()), &miss,
2567 __ CheckAccessGlobalProxy(a1, a3, &miss);
2574 __ push(a1); // Receiver.
2575 __ li(a3, Operand(callback)); // Callback info.
2576 __ Push(a3, a2, a0);
2582 __ TailCallExternalReference(store_callback_property, 4, 1);
2585 __ bind(&miss);
2587 __ Jump(ic, RelocInfo::CODE_TARGET);
2606 __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss,
2611 __ CheckAccessGlobalProxy(a1, a3, &miss);
2618 __ Push(a1, a2, a0); // Receiver, name, value.
2620 __ li(a0, Operand(Smi::FromInt(strict_mode_)));
2621 __ push(a0); // Strict mode.
2627 __ TailCallExternalReference(store_ic_property, 4, 1);
2630 __ bind(&miss);
2632 __ Jump(ic, RelocInfo::CODE_TARGET);
2652 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2653 __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2659 __ li(t0, Operand(cell));
2660 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
2661 __ lw(t2, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2662 __ Branch(&miss, eq, t1, Operand(t2));
2665 __ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2666 __ mov(v0, a0); // Stored value must be returned in v0.
2670 __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);
2671 __ Ret();
2674 __ bind(&miss);
2675 __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
2677 __ Jump(ic, RelocInfo::CODE_TARGET);
2694 __ JumpIfSmi(a0, &miss);
2707 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2708 __ Ret();
2710 __ bind(&miss);
2729 __ mov(v0, a0);
2732 __ bind(&miss);
2753 __ bind(&miss);
2773 __ bind(&miss);
2796 __ bind(&miss);
2818 __ JumpIfSmi(a0, &miss);
2822 __ li(a3, Operand(cell));
2823 __ lw(t0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
2827 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2828 __ Branch(&miss, eq, t0, Operand(at));
2831 __ mov(v0, t0);
2833 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
2834 __ Ret();
2836 __ bind(&miss);
2837 __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);
2857 __ Branch(&miss, ne, a0, Operand(name));
2860 __ bind(&miss);
2880 __ Branch(&miss, ne, a0, Operand(name));
2884 __ bind(&miss);
2904 __ Branch(&miss, ne, a0, Operand(name));
2907 __ bind(&miss);
2927 __ Branch(&miss, ne, a0, Operand(name));
2933 __ bind(&miss);
2950 __ Branch(&miss, ne, a0, Operand(name));
2953 __ bind(&miss);
2970 __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
2973 __ Branch(&miss, ne, a0, Operand(name));
2976 __ bind(&miss);
2977 __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
2995 __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
2998 __ Branch(&miss, ne, a0, Operand(name));
3001 __ bind(&miss);
3002 __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3019 __ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK);
3022 __ Jump(ic, RelocInfo::CODE_TARGET);
3038 __ JumpIfSmi(a1, &miss);
3041 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
3043 __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET,
3047 __ bind(&miss);
3049 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3070 __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);
3073 __ Branch(&miss, ne, a1, Operand(name));
3078 __ bind(&miss);
3080 __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
3082 __ Jump(ic, RelocInfo::CODE_TARGET);
3103 __ DispatchMap(a2, a3, receiver_map, stub, DO_SMI_CHECK);
3106 __ Jump(ic, RelocInfo::CODE_TARGET);
3125 __ JumpIfSmi(a2, &miss);
3128 __ lw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
3131 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
3135 __ Branch(&next_map, ne, a3, Operand(receiver_maps->at(i)));
3136 __ li(a3, Operand(transitioned_maps->at(i)));
3137 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
3138 __ bind(&next_map);
3142 __ bind(&miss);
3144 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3160 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
3166 __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
3167 __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset));
3168 __ Branch(&generic_stub_call, ne, a2, Operand(t7));
3174 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
3175 __ JumpIfSmi(a2, &generic_stub_call);
3176 __ GetObjectType(a2, a3, t0);
3177 __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
3185 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3186 __ Check(ne, "Function constructed by construct stub.",
3195 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
3196 __ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS);
3206 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
3207 __ mov(t5, t4);
3208 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
3209 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
3210 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
3211 __ Addu(t5, t5, Operand(3 * kPointerSize));
3219 __ sll(a1, a0, kPointerSizeLog2);
3220 __ Addu(a1, a1, sp);
3237 __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
3239 __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
3240 __ sw(a2, MemOperand(t5));
3241 __ Addu(t5, t5, kPointerSize);
3242 __ jmp(&next);
3243 __ bind(&not_passed);
3245 __ sw(t7, MemOperand(t5));
3246 __ Addu(t5, t5, Operand(kPointerSize));
3247 __ bind(&next);
3251 __ li(a2, Operand(constant));
3252 __ sw(a2, MemOperand(t5));
3253 __ Addu(t5, t5, kPointerSize);
3262 __ sw(t7, MemOperand(t5));
3263 __ Addu(t5, t5, kPointerSize);
3269 __ mov(a1, a0);
3270 __ mov(v0, t4);
3271 __ Or(v0, v0, Operand(kHeapObjectTag));
3276 __ sll(t0, a1, kPointerSizeLog2);
3277 __ Addu(sp, sp, t0);
3278 __ Addu(sp, sp, Operand(kPointerSize));
3280 __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
3281 __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);
3282 __ Ret();
3286 __ bind(&generic_stub_call);
3289 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3296 #undef __
3297 #define __ ACCESS_MASM(masm)
3312 __ JumpIfNotSmi(key, &miss_force_generic);
3313 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
3314 __ sra(a2, a0, kSmiTagSize);
3315 __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
3316 __ Ret();
3319 __ bind(&slow);
3320 __ IncrementCounter(
3331 __ Jump(slow_ic, RelocInfo::CODE_TARGET);
3334 __ bind(&miss_force_generic);
3344 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3392 __ JumpIfNotSmi(key, &miss_force_generic);
3394 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3398 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3399 __ sra(t2, key, kSmiTagSize);
3401 __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3403 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3413 __ srl(t2, key, 1);
3414 __ addu(t3, a3, t2);
3415 __ lb(value, MemOperand(t3, 0));
3419 __ srl(t2, key, 1);
3420 __ addu(t3, a3, t2);
3421 __ lbu(value, MemOperand(t3, 0));
3424 __ addu(t3, a3, key);
3425 __ lh(value, MemOperand(t3, 0));
3428 __ addu(t3, a3, key);
3429 __ lhu(value, MemOperand(t3, 0));
3433 __ sll(t2, key, 1);
3434 __ addu(t3, a3, t2);
3435 __ lw(value, MemOperand(t3, 0));
3438 __ sll(t3, t2, 2);
3439 __ addu(t3, a3, t3);
3442 __ lwc1(f0, MemOperand(t3, 0));
3444 __ lw(value, MemOperand(t3, 0));
3448 __ sll(t2, key, 2);
3449 __ addu(t3, a3, t2);
3452 __ ldc1(f0, MemOperand(t3, 0));
3455 __ lw(a2, MemOperand(t3, 0));
3456 __ lw(a3, MemOperand(t3, Register::kSizeInBytes));
3482 __ Subu(t3, value, Operand(0xC0000000)); // Non-smi value gives neg result.
3483 __ Branch(&box_int, lt, t3, Operand(zero_reg));
3485 __ sll(v0, value, kSmiTagSize);
3486 __ Ret();
3488 __ bind(&box_int);
3493 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3494 __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
3498 __ mtc1(value, f0);
3499 __ cvt_d_w(f0, f0);
3500 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3501 __ Ret();
3515 __ sw(dst1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3516 __ sw(dst2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3517 __ Ret();
3526 __ And(t2, value, Operand(0xC0000000));
3527 __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));
3531 __ sll(v0, value, kSmiTagSize);
3532 __ Ret();
3534 __ bind(&pl_box_int);
3538 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3539 __ AllocateHeapNumber(v0, t2, t3, t6, &slow);
3542 // __ mtc1(value, f0); // LS 32-bits.
3543 // __ mtc1(zero_reg, f1); // MS 32-bits are all zero.
3544 // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
3546 __ Cvt_d_uw(f0, value, f22);
3548 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3550 __ Ret();
3554 __ And(t2, value, Operand(0x80000000));
3555 __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
3556 __ And(t2, value, Operand(0x40000000));
3557 __ Branch(&box_int_1, ne, t2, Operand(zero_reg));
3560 __ sll(v0, value, kSmiTagSize);
3561 __ Ret();
3566 __ bind(&box_int_0);
3569 __ Branch(&done);
3571 __ bind(&box_int_1);
3576 __ bind(&done);
3581 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3582 __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
3584 __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset));
3585 __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset));
3587 __ mov(v0, t2);
3588 __ Ret();
3598 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3599 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3601 __ cvt_d_s(f0, f0);
3602 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3603 __ Ret();
3608 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3609 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3616 __ And(t4, value, Operand(kBinary32MantissaMask));
3619 __ srl(t5, value, kBinary32MantissaBits);
3620 __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3623 __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));
3625 __ li(t0, 0x7ff);
3626 __ Xor(t1, t5, Operand(0xFF));
3627 __ Movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
3628 __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg)); // t1 == 0 iff exponent was 0xff.
3631 __ Addu(t5,
3635 __ bind(&exponent_rebiased);
3636 __ And(a2, value, Operand(kBinary32SignMask));
3638 __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);
3639 __ or_(a2, a2, t0);
3648 __ srl(t0, t4, kMantissaShiftForHiWord);
3649 __ or_(a2, a2, t0);
3650 __ sll(a0, t4, kMantissaShiftForLoWord);
3652 __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3653 __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3654 __ Ret();
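Lines 3616-3654 widen binary32 bits to binary64 hi/lo words without FPU help: keep the sign, rebias the 8-bit exponent to 11 bits (0 stays 0, so zeros and denormals flush as in the emitted code; 0xFF becomes 0x7FF for Inf/NaN), and move the 23 mantissa bits to the top of the 52-bit field. A hedged C++ rendering with the constants named in the listing:

    void Float32BitsToFloat64Words(uint32_t value,
                                   uint32_t* hi, uint32_t* lo) {
      uint32_t mantissa = value & kBinary32MantissaMask;       // low 23 bits
      uint32_t exponent = (value >> kBinary32MantissaBits) & 0xFF;
      if (exponent == 0xFF) {
        exponent = 0x7FF;                                      // Inf or NaN
      } else if (exponent != 0) {
        exponent += HeapNumber::kExponentBias - kBinary32ExponentBias;
      }
      int shift = kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
      *hi = (value & kBinary32SignMask)
          | (exponent << HeapNumber::kMantissaBitsInTopWord)
          | (mantissa >> shift);                               // top 20 bits
      *lo = mantissa << (kBitsPerInt - shift);                 // low 3 bits
    }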
3663 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3664 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3666 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3667 __ Ret();
3672 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3673 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3675 __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3676 __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3677 __ Ret();
3682 __ sll(v0, value, kSmiTagSize);
3683 __ Ret();
3687 __ bind(&slow);
3688 __ IncrementCounter(
3698 __ Push(a1, a0);
3700 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3702 __ bind(&miss_force_generic);
3705 __ Jump(stub, RelocInfo::CODE_TARGET);
3731 __ JumpIfNotSmi(key, &miss_force_generic);
3733 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3736 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3738 __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3746 __ JumpIfNotSmi(value, &slow);
3748 __ JumpIfNotSmi(value, &check_heap_number);
3750 __ SmiUntag(t1, value);
3751 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3761 __ li(v0, Operand(255));
3763 __ Branch(&done, gt, t1, Operand(v0));
3765 __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg));
3766 __ mov(v0, zero_reg); // In delay slot.
3767 __ mov(v0, t1); // Value is in range 0..255.
3768 __ bind(&done);
3769 __ mov(t1, v0);
3771 __ srl(t8, key, 1);
3772 __ addu(t8, a3, t8);
3773 __ sb(t1, MemOperand(t8, 0));
3778 __ srl(t8, key, 1);
3779 __ addu(t8, a3, t8);
3780 __ sb(t1, MemOperand(t8, 0));
3784 __ addu(t8, a3, key);
3785 __ sh(t1, MemOperand(t8, 0));
3789 __ sll(t8, key, 1);
3790 __ addu(t8, a3, t8);
3791 __ sw(t1, MemOperand(t8, 0));
3795 __ SmiUntag(t0, key);
3799 __ sll(t8, key, 2);
3800 __ addu(a3, a3, t8);
3814 __ sdc1(f0, MemOperand(a3, 0));
3816 __ sw(t2, MemOperand(a3, 0));
3817 __ sw(t3, MemOperand(a3, Register::kSizeInBytes));
3830 __ mov(v0, a0);
3831 __ Ret();
3835 __ bind(&check_heap_number);
3836 __ GetObjectType(value, t1, t2);
3837 __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE));
3839 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3850 __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset));
3853 __ cvt_s_d(f0, f0);
3854 __ sll(t8, key, 1);
3855 __ addu(t8, a3, t8);
3856 __ swc1(f0, MemOperand(t8, 0));
3858 __ sll(t8, key, 2);
3859 __ addu(t8, a3, t8);
3860 __ sdc1(f0, MemOperand(t8, 0));
3862 __ EmitECMATruncate(t3, f0, f2, t2, t1, t5);
3867 __ srl(t8, key, 1);
3868 __ addu(t8, a3, t8);
3869 __ sb(t3, MemOperand(t8, 0));
3873 __ addu(t8, a3, key);
3874 __ sh(t3, MemOperand(t8, 0));
3878 __ sll(t8, key, 1);
3879 __ addu(t8, a3, t8);
3880 __ sw(t3, MemOperand(t8, 0));
3897 __ mov(v0, a0);
3898 __ Ret();
3902 __ lw(t3, FieldMemOperand(value, HeapNumber::kExponentOffset));
3903 __ lw(t4, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3915 __ li(t5, HeapNumber::kExponentMask);
3916 __ and_(t6, t3, t5);
3917 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));
3919 __ xor_(t1, t6, t5);
3920 __ li(t2, kBinary32ExponentMask);
3921 __ Movz(t6, t2, t1); // Only if t6 is equal to t5.
3922 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5));
3925 __ srl(t6, t6, HeapNumber::kExponentShift);
3926 __ Addu(t6,
3930 __ li(t1, Operand(kBinary32MaxExponent));
3931 __ Slt(t1, t1, t6);
3932 __ And(t2, t3, Operand(HeapNumber::kSignMask));
3933 __ Or(t2, t2, Operand(kBinary32ExponentMask));
3934 __ Movn(t3, t2, t1); // Only if t6 is gt kBinary32MaxExponent.
3935 __ Branch(&done, gt, t6, Operand(kBinary32MaxExponent));
3937 __ Slt(t1, t6, Operand(kBinary32MinExponent));
3938 __ And(t2, t3, Operand(HeapNumber::kSignMask));
3939 __ Movn(t3, t2, t1); // Only if t6 is lt kBinary32MinExponent.
3940 __ Branch(&done, lt, t6, Operand(kBinary32MinExponent));
3942 __ And(t7, t3, Operand(HeapNumber::kSignMask));
3943 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3944 __ sll(t3, t3, kMantissaInHiWordShift);
3945 __ or_(t7, t7, t3);
3946 __ srl(t4, t4, kMantissaInLoWordShift);
3947 __ or_(t7, t7, t4);
3948 __ sll(t6, t6, kBinary32ExponentShift);
3949 __ or_(t3, t7, t6);
3951 __ bind(&done);
3952 __ sll(t9, key, 1);
3953 __ addu(t9, a2, t9);
3954 __ sw(t3, MemOperand(t9, 0));
3958 __ mov(v0, a0);
3959 __ Ret();
3961 __ bind(&nan_or_infinity_or_zero);
3962 __ And(t7, t3, Operand(HeapNumber::kSignMask));
3963 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3964 __ or_(t6, t6, t7);
3965 __ sll(t3, t3, kMantissaInHiWordShift);
3966 __ or_(t6, t6, t3);
3967 __ srl(t4, t4, kMantissaInLoWordShift);
3968 __ or_(t3, t6, t4);
3969 __ Branch(&done);
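Lines 3915-3968 perform the narrowing conversion for the store path: binary64 hi/lo words to binary32 bits, with the exponent rebias reversed (1023 to 127), overflow saturating to signed Infinity, underflow flushing to signed zero, and the top 23 mantissa bits kept with no rounding. A hedged C++ rendering (constant names as in the listing):

    uint32_t Float64WordsToFloat32Bits(uint32_t hi, uint32_t lo) {
      uint32_t sign = hi & HeapNumber::kSignMask;
      uint32_t exp_bits = hi & HeapNumber::kExponentMask;
      uint32_t mantissa =
          ((hi & HeapNumber::kMantissaMask) << kMantissaInHiWordShift)
          | (lo >> kMantissaInLoWordShift);          // top 23 mantissa bits
      if (exp_bits == 0)                             // zero or denormal
        return sign | mantissa;                      // exponent field stays 0
      if (exp_bits == HeapNumber::kExponentMask)     // NaN or Infinity
        return sign | kBinary32ExponentMask | mantissa;
      int exponent = static_cast<int>(exp_bits >> HeapNumber::kExponentShift)
                   - HeapNumber::kExponentBias + kBinary32ExponentBias;
      if (exponent > kBinary32MaxExponent)           // too large: +/-Infinity
        return sign | kBinary32ExponentMask;
      if (exponent < kBinary32MinExponent)           // too small: signed zero
        return sign;
      return sign | (exponent << kBinary32ExponentShift) | mantissa;
    }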
3971 __ sll(t8, t0, 3);
3972 __ addu(t8, a3, t8);
3974 __ sw(t4, MemOperand(t8, 0));
3975 __ sw(t3, MemOperand(t8, Register::kSizeInBytes));
3976 __ mov(v0, a0);
3977 __ Ret();
3987 __ li(t5, HeapNumber::kExponentMask);
3988 __ and_(t6, t3, t5);
3989 __ Movz(t3, zero_reg, t6); // Only if t6 is equal to zero.
3990 __ Branch(&done, eq, t6, Operand(zero_reg));
3992 __ xor_(t2, t6, t5);
3993 __ Movz(t3, zero_reg, t2); // Only if t6 is equal to t5.
3994 __ Branch(&done, eq, t6, Operand(t5));
3997 __ srl(t6, t6, HeapNumber::kExponentShift);
3998 __ Subu(t6, t6, Operand(HeapNumber::kExponentBias));
4000 __ slt(t2, t6, zero_reg);
4001 __ Movn(t3, zero_reg, t2); // Only if exponent is negative.
4002 __ Branch(&done, lt, t6, Operand(zero_reg));
4005 __ slti(t1, t6, meaningfull_bits - 1);
4006 __ li(t2, min_value);
4007 __ Movz(t3, t2, t1); // Only if t6 is ge meaningfull_bits - 1.
4008 __ Branch(&done, ge, t6, Operand(meaningfull_bits - 1));
4010 __ And(t5, t3, Operand(HeapNumber::kSignMask));
4011 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4012 __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4014 __ li(t9, HeapNumber::kMantissaBitsInTopWord);
4015 __ subu(t6, t9, t6);
4016 __ slt(t1, t6, zero_reg);
4017 __ srlv(t2, t3, t6);
4018 __ Movz(t3, t2, t1); // Only if t6 is positive.
4019 __ Branch(&sign, ge, t6, Operand(zero_reg));
4021 __ subu(t6, zero_reg, t6);
4022 __ sllv(t3, t3, t6);
4023 __ li(t9, meaningfull_bits);
4024 __ subu(t6, t9, t6);
4025 __ srlv(t4, t4, t6);
4026 __ or_(t3, t3, t4);
4028 __ bind(&sign);
4029 __ subu(t2, zero_reg, t3);
4030 __ Movn(t3, t2, t5); // Negate only if the sign bit (t5) is set.
4032 __ bind(&done);
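Lines 3987-4030 truncate binary64 hi/lo words to a 32-bit integer without FPU help: NaN, Infinity, zeros, denormals, and values below 1 all collapse to 0, and overflow saturates to min_value. A hedged C++ rendering of the sequence (min_value and meaningfull_bits as in the surrounding source):

    int32_t DoubleWordsToInt32(uint32_t hi, uint32_t lo) {
      uint32_t exp_bits = hi & HeapNumber::kExponentMask;
      if (exp_bits == 0) return 0;                          // +/-0, denormals
      if (exp_bits == HeapNumber::kExponentMask) return 0;  // NaN, Infinity
      int exponent = static_cast<int>(exp_bits >> HeapNumber::kExponentShift)
                   - HeapNumber::kExponentBias;
      if (exponent < 0) return 0;                           // |x| < 1
      if (exponent >= meaningfull_bits - 1) return min_value;  // overflow
      uint32_t sign = hi & HeapNumber::kSignMask;
      uint32_t mantissa = (hi & HeapNumber::kMantissaMask)
                        | (1u << HeapNumber::kMantissaBitsInTopWord);
      int shift = HeapNumber::kMantissaBitsInTopWord - exponent;
      uint32_t result = (shift >= 0)
          ? mantissa >> shift                               // hi word suffices
          : (mantissa << -shift) | (lo >> (meaningfull_bits + shift));
      return sign ? -static_cast<int32_t>(result)
                  : static_cast<int32_t>(result);
    }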
4039 __ srl(t8, key, 1);
4040 __ addu(t8, a3, t8);
4041 __ sb(t3, MemOperand(t8, 0));
4045 __ addu(t8, a3, key);
4046 __ sh(t3, MemOperand(t8, 0));
4050 __ sll(t8, key, 1);
4051 __ addu(t8, a3, t8);
4052 __ sw(t3, MemOperand(t8, 0));
4070 __ bind(&slow);
4071 __ IncrementCounter(
4082 __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4085 __ bind(&miss_force_generic);
4095 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4111 __ JumpIfNotSmi(a0, &miss_force_generic, at, USE_DELAY_SLOT);
4115 __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
4116 __ AssertFastElements(a2);
4119 __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
4120 __ Branch(USE_DELAY_SLOT, &miss_force_generic, hs, a0, Operand(a3));
4123 __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4125 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
4126 __ Addu(t0, t0, a3);
4127 __ lw(t0, MemOperand(t0));
4128 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
4129 __ Branch(&miss_force_generic, eq, t0, Operand(t1));
4130 __ Ret(USE_DELAY_SLOT);
4131 __ mov(v0, t0);
4133 __ bind(&miss_force_generic);
4136 __
4163 __ JumpIfNotSmi(key_reg, &miss_force_generic);
4166 __ lw(elements_reg,
4170 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4171 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4174 __ sll(scratch2, key_reg, kDoubleSizeLog2 - kSmiTagSize);
4175 __ Addu(indexed_double_offset, elements_reg, Operand(scratch2));
4177 __ lw(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
4178 __ Branch(&miss_force_generic, eq, scratch, Operand(kHoleNanUpper32));
4181 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4182 __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
4187 __ sw(scratch, FieldMemOperand(heap_number_reg,
4189 __ lw(scratch, FieldMemOperand(indexed_double_offset,
4191 __ sw(scratch, FieldMemOperand(heap_number_reg,
4194 __ mov(v0, heap_number_reg);
4195 __ Ret();
4197 __ bind(&slow_allocate_heapnumber);
4200 __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4202 __ bind(&miss_force_generic);
4205 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4237 __ JumpIfNotSmi(key_reg, &miss_force_generic);
4240 __ JumpIfNotSmi(value_reg, &transition_elements_kind);
4244 __ lw(elements_reg,
4247 __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4249 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4253 __ Branch(&grow, hs, key_reg, Operand(scratch));
4255 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4259 __ CheckMap(elements_reg,
4265 __ bind(&finish_store);
4268 __ Addu(scratch,
4272 __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
4273 __ Addu(scratch, scratch, scratch2);
4274 __ sw(value_reg, MemOperand(scratch));
4277 __ Addu(scratch,
4281 __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
4282 __ Addu(scratch, scratch, scratch2);
4283 __ sw(value_reg, MemOperand(scratch));
4284 __ mov(receiver_reg, value_reg);
4286 __ RecordWrite(elements_reg, // Object.
4294 __ Ret();
4296 __ bind(&miss_force_generic);
4299 __ Jump(ic, RelocInfo::CODE_TARGET);
4301 __ bind(&transition_elements_kind);
4303 __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4307 __ bind(&grow);
4311 __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch));
4315 __ lw(length_reg,
4317 __ lw(elements_reg,
4319 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4320 __ Branch(&check_capacity, ne, elements_reg, Operand(at));
4323 __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
4326 __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
4327 __ sw(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
4328 __ li(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
4329 __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4330 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
4332 __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i)));
4336 __ sw(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)));
4339 __ sw(elements_reg,
4341 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
4346 __ li(length_reg, Operand(Smi::FromInt(1)));
4347 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4348 __ Ret();
4350 __ bind(&check_capacity);
4352 __ CheckMap(elements_reg,
4358 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4359 __ Branch(&slow, hs, length_reg, Operand(scratch));
4362 __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1)));
4363 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4364 __ jmp(&finish_store);
4366 __ bind(&slow);
4368 __ Jump(ic_slow, RelocInfo::CODE_TARGET);
4403 __ JumpIfNotSmi(key_reg, &miss_force_generic);
4405 __ lw(elements_reg,
4410 __ lw(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4412 __ lw(scratch1,
4418 __ Branch(&grow, hs, key_reg, Operand(scratch1));
4420 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1));
4423 __ bind(&finish_store);
4425 __ StoreNumberToDoubleElements(value_reg,
4435 __ Ret(USE_DELAY_SLOT);
4436 __ mov(v0, value_reg); // In delay slot.
4439 __ bind(&miss_force_generic);
4442 __ Jump(ic, RelocInfo::CODE_TARGET);
4444 __ bind(&transition_elements_kind);
4446 __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4450 __ bind(&grow);
4454 __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch1));
4458 __ JumpIfSmi(value_reg, &value_is_smi);
4459 __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
4460 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4461 __ Branch(&transition_elements_kind, ne, scratch1, Operand(at));
4462 __ bind(&value_is_smi);
4466 __ lw(length_reg,
4468 __ lw(elements_reg,
4470 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4471 __ Branch(&check_capacity, ne, elements_reg, Operand(at));
4474 __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
4479 __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
4480 __ sw(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
4481 __ li(scratch1, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
4482 __ sw(scratch1,
4486 __ sw(elements_reg,
4488 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
4493 __ li(length_reg, Operand(Smi::FromInt(1)));
4494 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4495 __ lw(elements_reg,
4497 __ jmp(&finish_store);
4499 __ bind(&check_capacity);
4501 __ lw(scratch1,
4503 __ Branch(&slow, hs, length_reg, Operand(scratch1));
4506 __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1)));
4507 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4508 __ jmp(&finish_store);
4510 __ bind(&slow);
4512 __ Jump(ic_slow, RelocInfo::CODE_TARGET);
4517 #undef __