Lines Matching defs:__
39 #define __ ACCESS_MASM(masm)
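Every __-prefixed match below goes through this macro. As a minimal sketch of the convention (assuming the usual V8 definition of ACCESS_MASM as "masm->" in macro-assembler.h; the helper name here is hypothetical, for illustration only):

    // With "#define __ ACCESS_MASM(masm)" in scope, a generator such as
    static void GenerateExample(MacroAssembler* masm, Register reg) {
      __ mov(reg, Operand(0));  // preprocesses to masm->mov(reg, Operand(0));
      __ Ret();                 // preprocesses to masm->Ret();
    }
    // emits ARM code through ordinary MacroAssembler method calls.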
74 __ add(offset_scratch, offset, Operand(offset, LSL, 1));
77 __ mov(base_addr, Operand(key_offset));
78 __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));
81 __ ldr(ip, MemOperand(base_addr, 0));
82 __ cmp(name, ip);
83 __ b(ne, &miss);
86 __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
87 __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
88 __ cmp(ip, scratch2);
89 __ b(ne, &miss);
94 __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));
99 __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
103 ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
104 __ bic(flags_reg, flags_reg, Operand(mask));
107 __ cmn(flags_reg, Operand(-flags));
109 __ cmp(flags_reg, Operand(flags));
111 __ b(ne, &miss);
115 __ jmp(&miss);
117 __ jmp(&miss);
122 __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
125 __ bind(&miss);
142 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
143 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
152 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
153 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
154 __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
155 __ b(ne, miss_label);
158 __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
159 __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
160 __ b(lt, miss_label);
164 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
166 __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
168 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
169 __ cmp(map, tmp);
170 __ b(ne, miss_label);
173 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
183 __ bind(&done);
184 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
224 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
228 __ JumpIfSmi(receiver, &miss);
231 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
232 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
233 __ add(scratch, scratch, Operand(ip));
237 __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
240 __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
242 __ and_(scratch, scratch, Operand(mask));
257 __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
259 __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
260 __ and_(scratch, scratch, Operand(mask2));
276 __ bind(&miss);
277 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
286 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
288 __ ldr(prototype,
291 __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
293 __ ldr(prototype,
296 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
307 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
308 __ Move(ip, isolate->global());
309 __ cmp(prototype, ip);
310 __ b(ne, miss);
315 __ Move(prototype, Handle<Map>(function->initial_map()));
317 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
334 __ ldr(dst, FieldMemOperand(src, offset));
338 __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
339 __ ldr(dst, FieldMemOperand(dst, offset));
349 __ JumpIfSmi(receiver, miss_label);
352 __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
353 __ b(ne, miss_label);
356 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
357 __ Ret();
371 __ JumpIfSmi(receiver, smi);
374 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
375 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
376 __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
378 __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
379 __ b(ne, non_string_object);
401 __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
402 __ Ret();
406 __ bind(&check_wrapper);
407 __ cmp(scratch1, Operand(JS_VALUE_TYPE));
408 __ b(ne, miss);
411 __
413 __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
414 __ Ret();
424 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
425 __ mov(r0, scratch1);
426 __ Ret();
448 __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
453 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
464 __ push(receiver_reg);
465 __ mov(r2, Operand(transition));
466 __ Push(r2, r0);
467 __ TailCallExternalReference(
478 __ mov(ip, Operand(transition));
479 __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
490 __ str(r0, FieldMemOperand(receiver_reg, offset));
493 __ JumpIfSmi(r0, &exit);
497 __ mov(name_reg, r0);
498 __ RecordWriteField(receiver_reg,
508 __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
509 __ str(r0, FieldMemOperand(scratch, offset));
512 __ JumpIfSmi(r0, &exit);
516 __ mov(name_reg, r0);
517 __ RecordWriteField(scratch,
526 __ bind(&exit);
527 __ Ret();
536 __ Jump(code, RelocInfo::CODE_TARGET);
551 __ JumpIfSmi(r1, miss);
552 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
553 __ b(ne, miss);
558 __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
559 __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
566 __ InvokeFunction(r1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
575 __ push(name);
579 __ mov(scratch, Operand(interceptor));
580 __ push(scratch);
581 __ push(receiver);
582 __ push(holder);
583 __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
584 __ push(scratch);
599 __ mov(r0, Operand(5));
600 __ mov(r1, Operand(ref));
603 __ CallStub(&stub);
615 __ mov(scratch, Operand(Smi::FromInt(0)));
617 __ push(scratch);
624 __ Drop(kFastApiCallArguments);
642 __ LoadHeapObject(r5, function);
643 __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
649 __ Move(r0, api_call_info);
650 __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
652 __ Move(r6, call_data);
655 __ stm(ib, sp, r5.bit() | r6.bit());
659 __ add(r2, sp, Operand(2 * kPointerSize));
664 __ EnterExitFrame(false, kApiStackSpace);
668 __ add(r0, sp, Operand(1 * kPointerSize));
670 __ str(r2, MemOperand(r0, 0 * kPointerSize));
672 __ add(ip, r2, Operand(argc * kPointerSize));
673 __ str(ip, MemOperand(r0, 1 * kPointerSize));
675 __ mov(ip, Operand(argc));
676 __ str(ip, MemOperand(r0, 2 * kPointerSize));
678 __ mov(ip, Operand(0));
679 __ str(ip, MemOperand(r0, 3 * kPointerSize));
689 __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
718 __ JumpIfSmi(receiver, miss);
759 __ IncrementCounter(counters->call_const_interceptor(), 1,
763 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
808 __ InvokeFunction(optimization.constant_function(), arguments_,
814 __ bind(&miss_cleanup);
816 __ b(miss_label);
820 __ bind(&regular_invoke);
843 __ push(name_);
845 __ CallExternalReference(
850 __ pop(name_);
862 __ Push(holder, name_);
868 __ pop(name_); // Restore the name.
869 __ pop(receiver); // Restore the holder.
872 __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
873 __ cmp(r0, scratch);
874 __ b(ne, interceptor_succeeded);
895 __ mov(scratch, Operand(cell));
896 __ ldr(scratch,
898 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
899 __ cmp(scratch, ip);
900 __ b(ne, miss);
938 __ vmov(s0, ival);
939 __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
940 __ vcvt_f32_s32(s0, s0);
941 __ vstr(s0, scratch1, 0);
949 __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
951 __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);
956 __ cmp(ival, Operand(1));
957 __ b(gt, &not_special);
963 __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
964 __ b(&done);
966 __ bind(&not_special);
970 __ CountLeadingZeros(zeros, ival, scratch1);
973 __ rsb(scratch1,
977 __ orr(fval,
982 __ add(zeros, zeros, Operand(1));
984 __ mov(ival, Operand(ival, LSL, zeros));
986 __ orr(fval,
990 __ bind(&done);
991 __ str(fval, MemOperand(dst, wordoffset, LSL, 2));
1015 __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
1017 __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
1018 __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
1020 __ mov(loword, Operand(0, RelocInfo::NONE));
1021 __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
1027 __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
1032 #undef __
1033 #define __ ACCESS_MASM(masm())
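From here until the next #undef, the shorthand targets masm() rather than a masm parameter: these matches come from StubCompiler member functions, which reach their MacroAssembler through an accessor. A sketch of the shape, with hypothetical class and method names and an assumed GetCode helper in the file's style:

    Handle<Code> ExampleStubCompiler::CompileExample(Handle<String> name) {
      Label miss;
      __ JumpIfSmi(r0, &miss);          // expands to masm()->JumpIfSmi(r0, &miss);
      __ Ret();                         // expands to masm()->Ret();
      __ bind(&miss);                   // expands to masm()->bind(&miss);
      return GetCode(CALLBACKS, name);  // assumption: the surrounding file's GetCode pattern
    }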
1055 __ str(reg, MemOperand(sp));
1081 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1083 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1086 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
1093 __ CheckAccessGlobalProxy(reg, scratch2, miss);
1100 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1103 __ mov(reg, Operand(prototype));
1108 __ str(reg, MemOperand(sp));
1119 __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
1125 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1148 __ JumpIfSmi(receiver, miss);
1154 __ Ret();
1168 __ JumpIfSmi(receiver, miss);
1175 __ LoadHeapObject(r0, value);
1176 __ Ret();
1191 __ JumpIfSmi(receiver, miss);
1199 __ push(receiver);
1200 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_
1202 __ Move(scratch3, callback);
1203 __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
1205 __ Move(scratch3, Handle<Object>(callback->data()));
1207 __ Push(reg, scratch3, name_reg);
1208 __ mov(r0, sp); // r0 = Handle<String>
1212 __ EnterExitFrame(false, kApiStackSpace);
1216 __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
1217 __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
1226 __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
1244 __ JumpIfSmi(receiver, miss);
1282 __ Push(receiver, holder_reg, name_reg);
1284 __ Push(holder_reg, name_reg);
1297 __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1298 __ cmp(r0, scratch1);
1299 __ b(eq, &interceptor_failed);
1301 __ Ret();
1303 __ bind(&interceptor_failed);
1304 __ pop(name_reg);
1305 __ pop(holder_reg);
1307 __ pop(receiver);
1330 __ Ret();
1342 __ Move(scratch2, callback);
1346 __ Push(receiver, holder_reg);
1347 __ ldr(scratch3,
1349 __ Push(scratch3, scratch2, name_reg);
1351 __ push(receiver);
1352 __ ldr(scratch3,
1354 __ Push(holder_reg, scratch3, scratch2, name_reg);
1360 __ TailCallExternalReference(ref, 5, 1);
1374 __ TailCallExternalReference(ref, 5, 1);
1381 __ cmp(r2, Operand(name));
1382 __ b(ne, miss);
1397 __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1400 __ JumpIfSmi(r0, miss);
1410 __ mov(r3, Operand(cell));
1411 __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
1420 __ JumpIfSmi(r1, miss);
1421 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
1422 __ b(ne, miss);
1425 __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
1426 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1427 __ cmp(r4, r3);
1429 __ cmp(r1, Operand(function));
1431 __ b(ne, miss);
1440 __ Jump(code, RelocInfo::CODE_TARGET);
1459 __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1461 __ JumpIfSmi(r0, &miss);
1470 __ bind(&miss);
1501 __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1504 __ JumpIfSmi(receiver, &miss);
1512 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1513 __ Drop(argc + 1);
1514 __ Ret();
1524 __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1527 __ CheckMap(elements,
1535 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1538 __ add(r0, r0, Operand(Smi::FromInt(argc)));
1541 __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
1544 __ cmp(r0, r4);
1545 __ b(gt, &attempt_to_grow_elements);
1549 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
1550 __ JumpIfNotSmi(r4, &with_write_barrier);
1553 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1558 __ add(end_elements, elements,
1562 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
1565 __ Drop(argc + 1);
1566 __ Ret();
1568 __ bind(&with_write_barrier);
1570 __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1574 __ CheckFastObjectElements(r3, r7, &not_fast_object);
1575 __ jmp(&fast_object);
1577 __ bind(&not_fast_object);
1578 __ CheckFastSmiOnlyElements(r3, r7, &call_builtin);
1581 __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
1586 __ mov(r2, receiver);
1588 __ bind(&fast_object);
1590 __ CheckFastObjectElements(r3, r3, &call_builtin);
1594 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1599 __ add(end_elements, elements,
1601 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
1603 __ RecordWrite(elements,
1610 __ Drop(argc + 1);
1611 __ Ret();
1613 __ bind(&attempt_to_grow_elements);
1618 __ b(&call_builtin);
1621 __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize));
1625 __ JumpIfSmi(r2, &no_fast_elements_check);
1626 __ ldr(r7, FieldMemOperand(receiver, HeapObject::kMapOffset));
1627 __ CheckFastObjectElements(r7, r7, &call_builtin);
1628 __ bind(&no_fast_elements_check);
1638 __ add(end_elements, elements,
1640 __ add(end_elements, end_elements, Operand(kEndElementsOffset));
1641 __ mov(r7, Operand(new_space_allocation_top));
1642 __ ldr(r3, MemOperand(r7));
1643 __ cmp(end_elements, r3);
1644 __ b(ne, &call_builtin);
1646 __ mov(r9, Operand(new_space_allocation_limit));
1647 __ ldr(r9, MemOperand(r9));
1648 __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
1649 __ cmp(r3, r9);
1650 __ b(hi, &call_builtin);
1654 __ str(r3, MemOperand(r7));
1656 __ str(r2, MemOperand(end_elements));
1658 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
1660 __ str(r3, MemOperand(end_elements, i * kPointerSize));
1664 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1665 __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
1666 __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
1669 __ Drop(argc + 1);
1670 __ Ret();
1672 __ bind(&call_builtin);
1673 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1680 __ bind(&miss);
1712 __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1714 __ JumpIfSmi(receiver, &miss);
1721 __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1724 __ CheckMap(elements,
1731 __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1732 __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
1733 __ b(lt, &return_undefined);
1736 __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
1741 __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
1742 __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1743 __ cmp(r0, r6);
1744 __ b(eq, &call_builtin);
1747 __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
1750 __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1751 __ Drop(argc + 1);
1752 __ Ret();
1754 __ bind(&return_undefined);
1755 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1756 __ Drop(argc + 1);
1757 __ Ret();
1759 __ bind(&call_builtin);
1760 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1766 __ bind(&miss);
1816 __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1818 __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
1820 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1831 __ Drop(argc + 1);
1832 __ Ret();
1838 __ bind(&index_out_of_range);
1839 __ LoadRoot(r0, Heap::kNanValueRootIndex);
1840 __ Drop(argc + 1);
1841 __ Ret();
1844 __ bind(&miss);
1846 __ Move(r2, name);
1847 __ bind(&name_miss);
1897 __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1899 __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
1901 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1913 __ Drop(argc + 1);
1914 __ Ret();
1920 __ bind(&index_out_of_range);
1921 __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
1922 __ Drop(argc + 1);
1923 __ Ret();
1926 __ bind(&miss);
1928 __ Move(r2, name);
1929 __ bind(&name_miss);
1961 __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
1964 __ JumpIfSmi(r1, &miss);
1977 __ ldr(code, MemOperand(sp, 0 * kPointerSize));
1982 __ JumpIfNotSmi(code, &slow);
1985 __ and_(code, code, Operand(Smi::FromInt(0xffff)));
1989 __ Drop(argc + 1);
1990 __ Ret();
1997 __ bind(&slow);
1998 __ InvokeFunction(
2001 __ bind(&miss);
2038 __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
2040 __ JumpIfSmi(r1, &miss);
2051 __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
2055 __ tst(r0, Operand(kSmiTagMask));
2056 __ Drop(argc + 1, eq);
2057 __ Ret(eq);
2059 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2071 __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
2072 __ vmov(d1, r4, r5);
2075 __ vmrs(r3);
2081 __ bic(r9, r3,
2083 __ orr(r9, r9, Operand(kRoundToMinusInf));
2084 __ vmsr(r9);
2087 __ vcvt_s32_f64(s0, d1, kFPSCRRounding);
2091 __ bic(r6, r5, Operand(HeapNumber::kSignMask));
2092 __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));
2095 __ vmrs(r9);
2096 __ tst(r9, Operand(kVFPExceptionMask));
2097 __ b(&no_vfp_exception, eq);
2102 __ sub(r7, r6, Operand(HeapNumber::kExponentMask
2104 __ b(&restore_fpscr_and_return, eq);
2107 __ cmp(r7, Operand(HeapNumber::kMantissaBits));
2109 __ b(&restore_fpscr_and_return, ge);
2110 __ b(&wont_fit_smi);
2112 __ bind(&no_vfp_exception);
2114 __ vmov(r0, s0);
2116 __ add(r1, r0, Operand(0x40000000), SetCC);
2117 __ b(&wont_fit_smi, mi);
2120 __ mov(r0, Operand(r0, LSL, kSmiTagSize));
2123 __ cmp(r0, Operand(0, RelocInfo::NONE));
2124 __ b(&restore_fpscr_and_return, ne);
2126 __ tst(r5, Operand(HeapNumber::kSignMask));
2129 __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);
2131 __ bind(&restore_fpscr_and_return);
2133 __ vmsr(r3);
2134 __ Drop(argc + 1);
2135 __ Ret();
2137 __ bind(&wont_fit_smi);
2139 __ vmsr(r3);
2141 __ bind(&slow);
2144 __ InvokeFunction(
2147 __ bind(&miss);
2178 __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
2180 __ JumpIfSmi(r1, &miss);
2191 __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
2196 __ JumpIfNotSmi(r0, &not_smi);
2200 __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));
2203 __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);
2208 __ b(mi, &slow);
2211 __ Drop(argc + 1);
2212 __ Ret();
2216 __ bind(&not_smi);
2217 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2218 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
2223 __ tst(r1, Operand(HeapNumber::kSignMask));
2224 __ b(ne, &negative_sign);
2225 __ Drop(argc + 1);
2226 __ Ret();
2230 __ bind(&negative_sign);
2231 __ eor(r1, r1, Operand(HeapNumber::kSignMask));
2232 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
2233 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2234 __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
2235 __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
2236 __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
2237 __ Drop(argc + 1);
2238 __ Ret();
2242 __ bind(&slow);
2243 __ InvokeFunction(
2246 __ bind(&miss);
2279 __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2282 __ JumpIfSmi(r1, &miss_before_stack_reserved);
2284 __ IncrementCounter(counters->call_const(), 1, r0, r3);
2285 __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
2295 __ bind(&miss);
2298 __ bind(&miss_before_stack_reserved);
2328 __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2332 __ JumpIfSmi(r1, &miss);
2340 __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2350 __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2351 __ str(r3, MemOperand(sp, argc * kPointerSize));
2358 __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
2359 __ b(ge, &miss);
2369 __ jmp(&miss);
2377 __ JumpIfSmi(r1, &fast);
2378 __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
2379 __ b(ne, &miss);
2380 __ bind(&fast);
2390 __ jmp(&miss);
2398 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
2399 __ cmp(r1, ip);
2400 __ b(eq, &fast);
2401 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
2402 __ cmp(r1, ip);
2403 __ b(ne, &miss);
2404 __ bind(&fast);
2414 __ jmp(&miss);
2422 __ InvokeFunction(
2426 __ bind(&miss);
2450 __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2457 __ mov(r1, r0);
2459 __ ldr(r0, MemOperand(sp, argc * kPointerSize));
2464 __ bind(&miss);
2499 __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
2500 __ str(r3, MemOperand(sp, argc * kPointerSize));
2504 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
2508 __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
2516 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2517 __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
2521 __ bind(&miss);
2522 __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
2543 __ bind(&miss);
2545 __ Jump(ic, RelocInfo::CODE_TARGET);
2565 __ CheckMap(r1, r3, Handle<Map>(object->map()), &miss,
2570 __ CheckAccessGlobalProxy(r1, r3, &miss);
2577 __ push(r1); // receiver
2578 __ mov(ip, Operand(callback)); // callback info
2579 __ Push(ip, r2, r0);
2585 __ TailCallExternalReference(store_callback_property, 4, 1);
2588 __ bind(&miss);
2590 __ Jump(ic, RelocInfo::CODE_TARGET);
2609 __ CheckMap(r1, r3, Handle<Map>(receiver->map()), &miss,
2614 __ CheckAccessGlobalProxy(r1, r3, &miss);
2621 __ Push(r1, r2, r0); // Receiver, name, value.
2623 __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
2624 __ push(r0); // strict mode
2630 __ TailCallExternalReference(store_ic_property, 4, 1);
2633 __ bind(&miss);
2635 __ Jump(ic, RelocInfo::CODE_TARGET);
2655 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
2656 __ cmp(r3, Operand(Handle<Map>(object->map())));
2657 __ b(ne, &miss);
2663 __ mov(r4, Operand(cell));
2664 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2665 __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
2666 __ cmp(r5, r6);
2667 __ b(eq, &miss);
2670 __
2674 __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
2675 __ Ret();
2678 __ bind(&miss);
2679 __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
2681 __ Jump(ic, RelocInfo::CODE_TARGET);
2698 __ JumpIfSmi(r0, &miss);
2712 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2713 __ Ret();
2715 __ bind(&miss);
2735 __ bind(&miss);
2756 __ bind(&miss);
2776 __ bind(&miss);
2798 __ bind(&miss);
2820 __ JumpIfSmi(r0, &miss);
2824 __ mov(r3, Operand(cell));
2825 __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
2829 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2830 __ cmp(r4, ip);
2831 __ b(eq, &miss);
2834 __ mov(r0, r4);
2836 __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
2837 __ Ret();
2839 __ bind(&miss);
2840 __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
2860 __ cmp(r0, Operand(name));
2861 __ b(ne, &miss);
2864 __ bind(&miss);
2884 __ cmp(r0, Operand(name));
2885 __ b(ne, &miss);
2889 __ bind(&miss);
2909 __ cmp(r0, Operand(name));
2910 __ b(ne, &miss);
2913 __ bind(&miss);
2933 __ cmp(r0, Operand(name));
2934 __ b(ne, &miss);
2940 __ bind(&miss);
2957 __ cmp(r0, Operand(name));
2958 __ b(ne, &miss);
2961 __ bind(&miss);
2978 __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
2981 __ cmp(r0, Operand(name));
2982 __ b(ne, &miss);
2985 __ bind(&miss);
2986 __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
3004 __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
3007 __ cmp(r0, Operand(name));
3008 __ b(ne, &miss);
3011 __ bind(&miss);
3012 __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
3029 __ DispatchMap(r1, r2, receiver_map, stub, DO_SMI_CHECK);
3032 __ Jump(ic, RelocInfo::CODE_TARGET);
3048 __ JumpIfSmi(r1, &miss);
3051 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
3053 __ mov(ip, Operand(receiver_maps->at(current)));
3054 __ cmp(r2, ip);
3055 __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET, eq);
3058 __ bind(&miss);
3060 __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
3080 __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);
3083 __ cmp(r1, Operand(name));
3084 __ b(ne, &miss);
3089 __ bind(&miss);
3091 __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
3093 __ Jump(ic, RelocInfo::CODE_TARGET);
3114 __ DispatchMap(r2, r3, receiver_map, stub, DO_SMI_CHECK);
3117 __ Jump(ic, RelocInfo::CODE_TARGET);
3136 __ JumpIfSmi(r2, &miss);
3139 __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
3141 __ mov(ip, Operand(receiver_maps->at(i)));
3142 __ cmp(r3, ip);
3144 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
3147 __ b(ne, &next_map);
3148 __ mov(r3, Operand(transitioned_maps->at(i)));
3149 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
3150 __ bind(&next_map);
3154 __ bind(&miss);
3156 __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
3174 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
3180 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
3181 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
3182 __ cmp(r2, r7);
3183 __ b(ne, &generic_stub_call);
3189 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
3190 __ JumpIfSmi(r2, &generic_stub_call);
3191 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
3192 __ b(ne, &generic_stub_call);
3200 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
3201 __ Check(ne, "Function constructed by construct stub.");
3209 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
3210 __ AllocateInNewSpace(r3, r4, r5, r6, &generic_stub_call, SIZE_IN_WORDS);
3220 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
3221 __ mov(r5, r4);
3223 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3225 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
3227 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
3231 __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
3248 __ cmp(r0, Operand(arg_number));
3249 __ b(le, &not_passed);
3251 __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
3252 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3253 __ b(&next);
3254 __ bind(&not_passed);
3256 __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
3257 __ bind(&next);
3261 __ mov(r2, Operand(constant));
3262 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3271 __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
3277 __ mov(r1, r0);
3278 __ mov(r0, r4);
3279 __ orr(r0, r0, Operand(kHeapObjectTag));
3284 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
3285 __ add(sp, sp, Operand(kPointerSize));
3287 __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
3288 __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
3289 __ Jump(lr);
3293 __ bind(&generic_stub_call);
3295 __ Jump(code, RelocInfo::CODE_TARGET);
3302 #undef __
3303 #define __ ACCESS_MASM(masm)
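The remaining matches switch back to an explicit masm parameter: the element-access and construct-stub generators below are standalone Generate* routines that receive the assembler directly. Sketch only, with an illustrative (non-existent) generator name:

    void GenerateExampleElementStub(MacroAssembler* masm) {
      Label miss_force_generic;
      __ JumpIfNotSmi(r0, &miss_force_generic);  // masm->JumpIfNotSmi(r0, &miss_force_generic);
      __ Ret();                                  // masm->Ret();
      __ bind(&miss_force_generic);              // miss: the real stubs tail-call a generic IC here
    }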
3318 __ JumpIfNotSmi(key, &miss_force_generic);
3319 __ mov(r2, Operand(key, ASR, kSmiTagSize));
3320 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
3321 __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
3322 __ Ret();
3324 __ bind(&slow);
3325 __ IncrementCounter(
3336 __ Jump(slow_ic, RelocInfo::CODE_TARGET);
3339 __ bind(&miss_force_generic);
3349 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3397 __ JumpIfNotSmi(key, &miss_force_generic);
3399 __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3403 __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
3404 __ cmp(key, ip);
3406 __ b(hs, &miss_force_generic);
3408 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3418 __ ldrsb(value, MemOperand(r3, key, LSR, 1));
3422 __ ldrb(value, MemOperand(r3, key, LSR, 1));
3425 __ ldrsh(value, MemOperand(r3, key, LSL, 0));
3428 __ ldrh(value, MemOperand(r3, key, LSL, 0));
3432 __ ldr(value, MemOperand(r3, key, LSL, 1));
3437 __ add(r2, r3, Operand(key, LSL, 1));
3438 __ vldr(s0, r2, 0);
3440 __ ldr(value, MemOperand(r3, key, LSL, 1));
3446 __ add(r2, r3, Operand(key, LSL, 2));
3447 __ vldr(d0, r2, 0);
3449 __ add(r4, r3, Operand(key, LSL, 2));
3451 __ ldr(r2, MemOperand(r4, 0));
3452 __ ldr(r3, MemOperand(r4, Register::kSizeInBytes));
3478 __ cmp(value, Operand(0xC0000000));
3479 __ b(mi, &box_int);
3481 __ mov(r0, Operand(value, LSL, kSmiTagSize));
3482 __ Ret();
3484 __ bind(&box_int);
3488 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3489 __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
3491 __ mov(r0, r5);
3495 __ vmov(s0, value);
3496 __ vcvt_f64_s32(d0, s0);
3497 __ sub(r3, r0, Operand(kHeapObjectTag));
3498 __ vstr(d0, r3, HeapNumber::kValueOffset);
3499 __ Ret();
3513 __ str(dst1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3514 __ str(dst2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3515 __ Ret();
3524 __ tst(value, Operand(0xC0000000));
3525 __ b(ne, &box_int);
3527 __ mov(r0, Operand(value, LSL, kSmiTagSize));
3528 __ Ret();
3530 __ bind(&box_int);
3531 __ vmov(s0, value);
3535 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3536 __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3538 __ vcvt_f64_u32(d0, s0);
3539 __ sub(r1, r2, Operand(kHeapObjectTag));
3540 __ vstr(d0, r1, HeapNumber::kValueOffset);
3542 __ mov(r0, r2);
3543 __ Ret();
3547 __ tst(value, Operand(0x80000000));
3548 __ b(ne, &box_int_0);
3549 __ tst(value, Operand(0x40000000));
3550 __ b(ne, &box_int_1);
3552 __ mov(r0, Operand(value, LSL, kSmiTagSize));
3553 __ Ret();
3558 __ bind(&box_int_0);
3561 __ b(&done);
3563 __ bind(&box_int_1);
3568 __ bind(&done);
3573 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3574 __ AllocateHeapNumber(r4, r5, r7, r6, &slow);
3576 __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
3577 __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
3579 __ mov(r0, r4);
3580 __ Ret();
3590 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3591 __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3592 __ vcvt_f64_f32(d0, s0);
3593 __ sub(r1, r2, Operand(kHeapObjectTag));
3594 __ vstr(d0, r1, HeapNumber::kValueOffset);
3596 __ mov(r0, r2);
3597 __ Ret();
3602 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3603 __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
3611 __ and_(r0, value, Operand(kBinary32MantissaMask));
3615 __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
3616 __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3619 __ teq(r1, Operand(0x00));
3620 __ b(eq, &exponent_rebiased);
3622 __ teq(r1, Operand(0xff));
3623 __ mov(r1, Operand(0x7ff), LeaveCC, eq);
3624 __ b(eq, &exponent_rebiased);
3627 __ add(r1,
3631 __ bind(&exponent_rebiased);
3632 __ and_(r2, value, Operand(kBinary32SignMask));
3634 __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));
3643 __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
3644 __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));
3646 __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3647 __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3649 __ mov(r0, r3);
3650 __ Ret();
3658 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3659 __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3660 __ sub(r1, r2, Operand(kHeapObjectTag));
3661 __ vstr(d0, r1, HeapNumber::kValueOffset);
3663 __ mov(r0, r2);
3664 __ Ret();
3669 __ LoadRoot(r7, Heap::kHeapNumberMapRootIndex);
3670 __ AllocateHeapNumber(r4, r5, r6, r7, &slow);
3672 __ str(r2, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
3673 __ str(r3, FieldMemOperand(r4, HeapNumber::kExponentOffset));
3674 __ mov(r0, r4);
3675 __ Ret();
3680 __ mov(r0, Operand(value, LSL, kSmiTagSize));
3681 __ Ret();
3685 __ bind(&slow);
3686 __ IncrementCounter(
3696 __ Push(r1, r0);
3698 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3700 __ bind(&miss_force_generic);
3703 __ Jump(stub, RelocInfo::CODE_TARGET);
3728 __ JumpIfNotSmi(key, &miss_force_generic);
3730 __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3733 __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
3734 __ cmp(key, ip);
3736 __ b(hs, &miss_force_generic);
3743 __ JumpIfNotSmi(value, &slow);
3745 __ JumpIfNotSmi(value, &check_heap_number);
3747 __ SmiUntag(r5, value);
3748 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3755 __ Usat(r5, 8, Operand(r5));
3756 __ strb(r5, MemOperand(r3, key, LSR, 1));
3760 __ strb(r5, MemOperand(r3, key, LSR, 1));
3764 __ strh(r5, MemOperand(r3, key, LSL, 0));
3768 __ str(r5, MemOperand(r3, key, LSL, 1));
3772 __ SmiUntag(r4, key);
3776 __ add(r3, r3, Operand(key, LSL, 2));
3790 __ vstr(d0, r3, 0);
3792 __ str(r6, MemOperand(r3, 0));
3793 __ str(r7, MemOperand(r3, Register::kSizeInBytes));
3806 __ Ret();
3810 __ bind(&check_heap_number);
3811 __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
3812 __ b(ne, &slow);
3814 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3827 __ sub(r5, r0, Operand(kHeapObjectTag));
3828 __ vldr(d0, r5, HeapNumber::kValueOffset);
3829 __ add(r5, r3, Operand(key, LSL, 1));
3830 __ vcvt_f32_f64(s0, d0);
3831 __ vstr(s0, r5, 0);
3833 __ sub(r5, r0, Operand(kHeapObjectTag));
3834 __ vldr(d0, r5, HeapNumber::kValueOffset);
3835 __ add(r5, r3, Operand(key, LSL, 2));
3836 __ vstr(d0, r5, 0);
3840 __ sub(r5, value, Operand(kHeapObjectTag));
3841 __ vldr(d0, r5, HeapNumber::kValueOffset);
3842 __ EmitECMATruncate(r5, d0, s2, r6, r7, r9);
3847 __ strb(r5, MemOperand(r3, key, LSR, 1));
3851 __ strh(r5, MemOperand(r3, key, LSL, 0));
3855 __ str(r5, MemOperand(r3, key, LSL, 1));
3872 __ Ret();
3875 __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
3876 __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3888 __ mov(r7, Operand(HeapNumber::kExponentMask));
3889 __ and_(r9, r5, Operand(r7), SetCC);
3890 __ b(eq, &nan_or_infinity_or_zero);
3892 __ teq(r9, Operand(r7));
3893 __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
3894 __ b(eq, &nan_or_infinity_or_zero);
3897 __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
3898 __ add(r9,
3902 __ cmp(r9, Operand(kBinary32MaxExponent));
3903 __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
3904 __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
3905 __ b(gt, &done);
3907 __ cmp(r9, Operand(kBinary32MinExponent));
3908 __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
3909 __ b(lt, &done);
3911 __ and_(r7, r5, Operand(HeapNumber::kSignMask));
3912 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3913 __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
3914 __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
3915 __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));
3917 __ bind(&done);
3918 __ str(r5, MemOperand(r3, key, LSL, 1));
3921 __ Ret();
3923 __ bind(&nan_or_infinity_or_zero);
3924 __ and_(r7, r5, Operand(HeapNumber::kSignMask));
3925 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3926 __ orr(r9, r9, r7);
3927 __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
3928 __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
3929 __ b(&done);
3931 __ add(r7, r3, Operand(key, LSL, 2));
3933 __ str(r6, MemOperand(r7, 0));
3934 __ str(r5, MemOperand(r7, Register::kSizeInBytes));
3935 __ Ret();
3945 __ mov(r7, Operand(HeapNumber::kExponentMask));
3946 __ and_(r9, r5, Operand(r7), SetCC);
3947 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
3948 __ b(eq, &done);
3950 __ teq(r9, Operand(r7));
3951 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
3952 __ b(eq, &done);
3955 __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
3956 __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
3958 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
3959 __ b(mi, &done);
3962 __ cmp(r9, Operand(meaningfull_bits - 1));
3963 __ mov(r5, Operand(min_value), LeaveCC, ge);
3964 __ b(ge, &done);
3966 __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
3967 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3968 __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
3970 __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
3971 __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
3972 __ b(pl, &sign);
3974 __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
3975 __ mov(r5, Operand(r5, LSL, r9));
3976 __ rsb(r9, r9, Operand(meaningfull_bits));
3977 __ orr(r5, r5, Operand(r6, LSR, r9));
3979 __ bind(&sign);
3980 __ teq(r7, Operand(0, RelocInfo::NONE));
3981 __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);
3983 __ bind(&done);
3987 __ strb(r5, MemOperand(r3, key, LSR, 1));
3991 __ strh(r5, MemOperand(r3, key, LSL, 0));
3995 __ str(r5, MemOperand(r3, key, LSL, 1));
4013 __ bind(&slow);
4014 __ IncrementCounter(
4025 __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4028 __ bind(&miss_force_generic);
4038 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4054 __ JumpIfNotSmi(r0, &miss_force_generic);
4057 __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
4058 __ AssertFastElements(r2);
4061 __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
4062 __ cmp(r0, Operand(r3));
4063 __ b(hs, &miss_force_generic);
4066 __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4068 __ ldr(r4,
4070 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
4071 __ cmp(r4, ip);
4072 __ b(eq, &miss_force_generic);
4073 __ mov(r0, r4);
4074 __ Ret();
4076 __ bind(&miss_force_generic);
4079 __ Jump(stub, RelocInfo::CODE_TARGET);
4106 __ JumpIfNotSmi(key_reg, &miss_force_generic);
4109 __ ldr(elements_reg,
4113 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4114 __ cmp(key_reg, Operand(scratch));
4115 __ b(hs, &miss_force_generic);
4118 __ add(indexed_double_offset, elements_reg,
4121 __ ldr(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
4122 __ cmp(scratch, Operand(kHoleNanUpper32));
4123 __ b(&miss_force_generic, eq);
4126 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4127 __
4132 __ str(scratch, FieldMemOperand(heap_number_reg,
4134 __ ldr(scratch, FieldMemOperand(indexed_double_offset,
4136 __ str(scratch, FieldMemOperand(heap_number_reg,
4139 __ mov(r0, heap_number_reg);
4140 __ Ret();
4142 __ bind(&slow_allocate_heapnumber);
4145 __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4147 __ bind(&miss_force_generic);
4150 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4182 __ JumpIfNotSmi(key_reg, &miss_force_generic);
4185 __ JumpIfNotSmi(value_reg, &transition_elements_kind);
4189 __ ldr(elements_reg,
4192 __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4194 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4197 __ cmp(key_reg, scratch);
4199 __ b(hs, &grow);
4201 __ b(hs, &miss_force_generic);
4205 __ CheckMap(elements_reg,
4211 __ bind(&finish_store);
4213 __ add(scratch,
4217 __ add(scratch,
4220 __ str(value_reg, MemOperand(scratch));
4223 __ add(scratch,
4227 __ add(scratch,
4230 __ str(value_reg, MemOperand(scratch));
4231 __ mov(receiver_reg, value_reg);
4232 __ RecordWrite(elements_reg, // Object.
4240 __ Ret();
4242 __ bind(&miss_force_generic);
4245 __ Jump(ic, RelocInfo::CODE_TARGET);
4247 __ bind(&transition_elements_kind);
4249 __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4253 __ bind(&grow);
4257 __ b(ne, &miss_force_generic);
4261 __ ldr(length_reg,
4263 __ ldr(elements_reg,
4265 __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
4266 __ b(ne, &check_capacity);
4269 __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
4272 __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
4273 __ str(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
4274 __ mov(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
4275 __ str(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4276 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
4278 __ str(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i)));
4282 __ str(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)));
4285 __ str(elements_reg,
4287 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
4292 __ mov(length_reg, Operand(Smi::FromInt(1)));
4293 __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4294 __ Ret();
4296 __ bind(&check_capacity);
4298 __ CheckMap(elements_reg,
4304 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4305 __ cmp(length_reg, scratch);
4306 __ b(hs, &slow);
4309 __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
4310 __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4311 __ jmp(&finish_store);
4313 __ bind(&slow);
4315 __ Jump(ic_slow, RelocInfo::CODE_TARGET);
4348 __ JumpIfNotSmi(key_reg, &miss_force_generic);
4350 __ ldr(elements_reg,
4355 __ ldr(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4357 __ ldr(scratch1,
4362 __ cmp(key_reg, scratch1);
4364 __ b(hs, &grow);
4366 __ b(hs, &miss_force_generic);
4369 __ bind(&finish_store);
4370 __ StoreNumberToDoubleElements(value_reg,
4379 __ Ret();
4382 __ bind(&miss_force_generic);
4385 __ Jump(ic, RelocInfo::CODE_TARGET);
4387 __ bind(&transition_elements_kind);
4389 __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4393 __ bind(&grow);
4397 __ b(ne, &miss_force_generic);
4401 __ JumpIfSmi(value_reg, &value_is_smi);
4402 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
4403 __ CompareRoot(scratch1, Heap::kHeapNumberMapRootIndex);
4404 __ b(ne, &transition_elements_kind);
4405 __ bind(&value_is_smi);
4409 __ ldr(length_reg,
4411 __ ldr(elements_reg,
4413 __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
4414 __ b(ne, &check_capacity);
4417 __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
4422 __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
4423 __ str(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
4424 __ mov(scratch1,
4426 __ str(scratch1,
4430 __ str(elements_reg,
4432 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
4437 __ mov(length_reg, Operand(Smi::FromInt(1)));
4438 __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4439 __ ldr(elements_reg,
4441 __ jmp(&finish_store);
4443 __ bind(&check_capacity);
4445 __ ldr(scratch1,
4447 __ cmp(length_reg, scratch1);
4448 __ b(hs, &slow);
4451 __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
4452 __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4453 __ jmp(&finish_store);
4455 __ bind(&slow);
4457 __ Jump(ic_slow, RelocInfo::CODE_TARGET);
4462 #undef __