Lines Matching defs:a2
99 a2,
109 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
110 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset));
111 __ lw(a2, MemOperand(a2, Context::SlotOffset(map_index)));
112 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
117 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
121 __ sw(a2, FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
153 a2,
162 __ li(a2, Operand(Smi::FromInt(length)));
163 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
167 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
172 __ sw(a2, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_INDEX)));
200 v0, a1, a2, &gc, TAG_OBJECT);
209 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex);
210 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
211 __ li(a2, Operand(Smi::FromInt(length)));
212 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
230 __ lw(a2, ContextOperand(cp, Context::GLOBAL_INDEX));
234 __ sw(a2, ContextOperand(v0, Context::GLOBAL_INDEX));
275 a2,
291 __ Addu(a2, v0, Operand(JSArray::kSize));
292 __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset));
296 __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize);
405 __ AllocateInNewSpace(size, v0, a1, a2, &slow_case, TAG_OBJECT);
531 __ Move(a2, a3, f14);
536 // Write Smi from a0 to a3 and a2 in double format.
538 ConvertToDoubleStub stub1(a3, a2, scratch1, scratch2);
558 // Load right operand (a0) to f12 or a2/a3.
560 a0, f14, a2, a3, heap_number_map, scratch1, scratch2, slow);
987 // a2: Right value (least significant part of mantissa).
1005 __ Move(f14, a2, a3);
1033 scratch_.is(a2) &&
1037 if (the_int_.is(a2) &&
1051 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
1052 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
1250 // Load lhs to a double in a2, a3.
1252 __ lw(a2, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1291 ConvertToDoubleStub stub2(a3, a2, t6, t5);
1319 __ mov(t2, a2); // a2 has LS 32 bits of lhs.
1386 __ mov(t2, a2); // a2 has LS 32 bits of lhs.
1425 __ Move(f14, a2, a3);
1463 // Get the type of the first operand into a2 and compare it with
1465 __ GetObjectType(lhs, a2, a2);
1466 __ Branch(&first_non_object, less, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
1476 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));
1488 __ And(t2, a2, Operand(a3));
1500 __ GetObjectType(lhs, a3, a2);
1501 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE));
1502 __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset));
1504 __ Branch(slow, ne, a3, Operand(a2));
1513 __ lw(a2, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1536 // a2 is object type of lhs.
1540 __ And(at, a2, Operand(kIsNotStringMask));
1542 __ And(at, a2, Operand(kIsSymbolMask));
1555 __ Branch(not_both_strings, lt, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
1556 __ GetObjectType(rhs, a2, a3);
1563 __ lbu(a2, FieldMemOperand(a2, Map::kBitFieldOffset));
1565 __ and_(a0, a2, a3);
1674 GenerateLookupNumberStringCache(masm, a1, v0, a2, a3, t0, false, &runtime);
1693 __ Or(a2, a1, a0);
1694 __ JumpIfNotSmi(a2, &not_two_smis);
1701 __ Or(a2, a1, a0);
1702 __ And(a2, a2, kSmiTagMask);
1704 a2, Operand(zero_reg));
1728 // or in GP registers (a0, a1, a2, a3) depending on the presence of the FPU.
1734 // and the right hand side if we have FPU. Otherwise a2, a3 represent
1796 // In this case a2 will contain the type of lhs_.
1808 // Assumes that a2 is the type of lhs_ on entry.
1816 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, a2, a3, &slow);
1818 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3);
1823 a2,
1830 a2,
1965 __ li(a2, Operand(Smi::FromInt(tos_.code())));
1967 __ Push(a3, a2, a1);
2041 __ li(a2, Operand(Smi::FromInt(op_)));
2044 __ Push(v0, a2, a1, a0);
2151 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2152 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign.
2153 __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2156 __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber);
2170 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2172 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign.
2173 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset));
2187 __ ConvertToInt32(a0, a1, a2, a3, f0, slow);
2192 __ Addu(a2, a1, Operand(0x40000000));
2193 __ Branch(&try_float, lt, a2, Operand(zero_reg));
2204 __ AllocateHeapNumber(a2, a3, t0, t2, &slow_allocate_heapnumber);
2212 __ mov(a2, v0); // Move the new heap number into a2.
2213 // Get the heap number into v0, now that the new heap number is in a2.
2225 __ mov(v0, a2); // Move newly allocated heap number to v0.
2229 // Convert the int32 in a1 to the heap number in v0. a2 is corrupted.
2238 WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3);
2306 __ li(a2, Operand(Smi::FromInt(MinorKey())));
2309 __ Push(a2, a1, a0);
2556 // Load left and right operands into f12 and f14 or a0/a1 and a2/a3
2630 __ SmiUntag(a2, right);
2632 // Convert operands to 32-bit integers. Right in a2 and left in a3.
2644 a2,
2655 __ Or(a2, a3, Operand(a2));
2658 __ Xor(a2, a3, Operand(a2));
2661 __ And(a2, a3, Operand(a2));
2665 __ GetLeastBitsFromInt32(a2, a2, 5);
2666 __ srav(a2, a3, a2);
2670 __ GetLeastBitsFromInt32(a2, a2, 5);
2671 __ srlv(a2, a3, a2);
2677 __ Branch(&result_not_a_smi, lt, a2, Operand(zero_reg));
2679 __ Branch(not_numbers, lt, a2, Operand(zero_reg));
2684 __ GetLeastBitsFromInt32(a2, a2, 5);
2685 __ sllv(a2, a3, a2);
2691 __ Addu(a3, a2, Operand(0x40000000));
2693 __ SmiTag(v0, a2);
2707 // a2: Answer as signed int32.
2715 // Convert the int32 in a2 to the heap number in a0. As
2718 __ mtc1(a2, f0);
2730 // Tail call that writes the int32 in a2 to the heap number in v0, using
2732 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
2822 __ GetObjectType(left, a2, a2);
2823 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
2827 __ GetObjectType(right, a2, a2);
2828 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
2885 a2,
3025 // Convert operands to 32-bit integers. Right in a2 and left in a3. The
3039 a2,
3051 __ Or(a2, a3, Operand(a2));
3054 __ Xor(a2, a3, Operand(a2));
3057 __ And(a2, a3, Operand(a2));
3060 __ And(a2, a2, Operand(0x1f));
3061 __ srav(a2, a3, a2);
3064 __ And(a2, a2, Operand(0x1f));
3065 __ srlv(a2, a3, a2);
3067 // We only get a negative result if the shift value (a2) is 0.
3077 a2,
3084 a2,
3089 __ And(a2, a2, Operand(0x1f));
3090 __ sllv(a2, a3, a2);
3097 __ Addu(scratch1, a2, Operand(0x40000000));
3101 __ SmiTag(v0, a2);
3118 __ mtc1(a2, double_scratch);
3122 __ mtc1(a2, double_scratch);
3131 // Tail call that writes the int32 in a2 to the heap number in v0, using
3134 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
3227 __ GetObjectType(left, a2, a2);
3228 __ Branch(&left_not_string, ge, a2, Operand(FIRST_NONSTRING_TYPE));
3237 __ GetObjectType(right, a2, a2);
3238 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
3355 // of the double into a2, a3.
3359 __ Move(a2, a3, f4);
3370 // low and high words into a2, a3.
3371 __ lw(a2, FieldMemOperand(a0, HeapNumber::kValueOffset));
3375 __ Move(a2, a3, f4);
3378 // a2 = low 32 bits of double value.
3382 __ Xor(a1, a2, a3);
3390 // a2 = low 32 bits of double value.
3428 __ Branch(&calculate, ne, a2, Operand(t0));
3466 __ Push(cache_entry, a2, a3);
3472 __ Pop(cache_entry, a2, a3);
3477 __ sw(a2, MemOperand(cache_entry, 0 * kPointerSize));
3587 const Register exponent = a2;
3875 __ li(a2, Operand(ExternalReference::isolate_address()));
3911 // It's okay to clobber a2 and a3 here. v0 & v1 contain result.
3912 __ li(a2, Operand(scope_depth));
3913 __ lw(a3, MemOperand(a2));
3915 __ sw(a3, MemOperand(a2));
3921 __ addiu(a2, v0, 1);
3922 __ andi(t0, a2, kFailureTagMask);
3985 // NOTE: s0-s2 hold the arguments of this function instead of a0-a2.
4037 __ li(a2, Operand(external_caught));
4038 __ sw(a0, MemOperand(a2));
4043 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
4045 __ sw(v0, MemOperand(a2));
4063 // a2: receiver
4106 // a2: receiver_pointer
4173 // a2: receiver_pointer
4255 const Register scratch = a2;
4310 // Get prototype of object into a2.
4431 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4432 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4454 __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4460 __ Addu(a3, a2, Operand(t3));
4479 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
4482 a2,
4486 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
4487 __ sw(a2, MemOperand(sp, 0 * kPointerSize));
4488 __ sll(t3, a2, 1);
4514 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
4517 a2,
4521 __ mov(a2, a1);
4527 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
4528 __ sll(t6, a2, 1);
4534 // a2 = argument count (tagged)
4535 // Compute the mapped parameter count = min(a1, a2) in a1.
4537 __ Branch(&skip_min, lt, a1, Operand(a2));
4538 __ mov(a1, a2);
4557 __ sll(t6, a2, 1);
4568 // a2 = argument count (tagged)
4588 // a2 = argument count (tagged)
4607 __ sw(a2, FieldMemOperand(v0, kLengthOffset));
4617 // a2 = argument count (tagged)
4680 // a2 = argument count (tagged)
4686 __ sw(a2, FieldMemOperand(a3, FixedArray::kLengthOffset));
4704 __ Branch(&arguments_loop, lt, t5, Operand(a2));
4710 // a2 = argument count (tagged)
4712 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count.
4723 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4724 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4736 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4739 __ Addu(a3, a2, Operand(at));
4758 a2,
4783 __ lw(a2, MemOperand(sp, 1 * kPointerSize));
4800 // Pre-decrement a2 with kPointerSize on each iteration.
4802 __ Addu(a2, a2, Operand(-kPointerSize));
4803 __ lw(a3, MemOperand(a2));
4892 __ lw(a2,
4898 __ Addu(a2, a2, Operand(2)); // a2 was a smi.
4900 __ Branch(&runtime, hi, a2, Operand(OffsetsVector::kStaticOffsetsVectorSize));
4902 // a2: Number of capture registers
4915 // a2: Number of capture registers
4925 // a2: Number of capture registers
4943 __ Addu(a2, a2, Operand(RegExpImpl::kLastMatchOverhead));
4945 __ Branch(&runtime, gt, a2, Operand(at));
5043 1, a0, a2);
5075 __ li(a2, Operand(address_of_regexp_stack_memory_size));
5076 __ lw(a2, MemOperand(a2, 0));
5077 __ addu(a0, a0, a2);
5096 // Argument 3, a2: Start of string data
5101 __ addu(a2, t0, t1);
5138 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
5140 __ lw(v0, MemOperand(a2, 0));
5143 __ sw(a1, MemOperand(a2, 0)); // Clear pending exception.
5172 __ sll(a2, a1, kSmiTagSize + kSmiShiftSize); // To smi.
5173 __ sw(a2, FieldMemOperand(last_match_info_elements,
5179 __ mov(a2, subject);
5182 a2,
5199 __ li(a2, Operand(address_of_static_offsets_vector));
5202 // a2: offsets vector
5213 __ lw(a3, MemOperand(a2, 0));
5214 __ addiu(a2, a2, kPointerSize);
5275 __ Addu(a2, t1, Operand(objects_size));
5277 a2, // In: Size, in words.
5291 __ lw(a2, ContextOperand(cp, Context::GLOBAL_INDEX));
5294 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset));
5296 __ lw(a2, ContextOperand(a2, Context::REGEXP_RESULT_MAP_INDEX));
5298 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
5302 __ lw(a2, MemOperand(sp, kPointerSize * 1));
5305 __ sw(a2, FieldMemOperand(v0, JSRegExpResult::kIndexOffset));
5314 __ li(a2, Operand(masm->isolate()->factory()->fixed_array_map()));
5315 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
5320 __ li(a2, Operand(masm->isolate()->factory()->the_hole_value()));
5324 // a2: the hole.
5332 __ sw(a2, MemOperand(a3));
5349 // a2 : cache cell for call target
5358 __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
5374 __ sw(a1, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
5380 __ sw(at, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
5388 // a2 : cache cell for call target
5403 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
5404 a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
5405 __ sw(a2, MemOperand(sp, argc_ * kPointerSize));
5413 __ GetObjectType(a1, a2, a2);
5414 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
5440 __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE));
5443 __ li(a2, Operand(0, RelocInfo::NONE));
5457 __ mov(a2, zero_reg);
5468 // a2 : cache cell for call target
5482 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
5483 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset));
5484 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
5500 __ li(a2, Operand(0, RelocInfo::NONE));
6064 __ lw(a2, MemOperand(sp, kToOffset));
6072 __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
6074 // Both a2 and a3 are untagged integers.
6078 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
6079 __ Subu(a2, a2, a3);
6093 // a2: result string length
6096 __ Branch(&return_v0, eq, a2, Operand(t0));
6103 __ Branch(&result_longer_than_two, gt, a2, Operand(t0));
6104 __ Branch(&runtime, lt, a2, Operand(t0));
6119 // a2: result string length.
6122 __ AllocateAsciiString(v0, a2, t0, t1, t4, &runtime);
6132 // a2: length
6174 // a2: length
6177 __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
6188 __ AllocateAsciiSlicedString(v0, a2, t2, t3, &runtime);
6191 __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
6203 // a2: length
6232 __ AllocateAsciiString(v0, a2, t0, t2, t3, &runtime);
6242 // a2: result string length
6246 masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
6251 __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
6262 // a2: result length.
6266 masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
6412 __ IncrementCounter(counters->string_compare_native(), 1, a1, a2);
6418 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime);
6421 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3);
6423 GenerateCompareFlatAsciiStrings(masm, a1, a0, a2, a3, t0, t1);
6463 masm, 1 * kPointerSize, a0, a2, a3, t0, t1, &call_builtin);
6468 masm, 0 * kPointerSize, a1, a2, a3, t0, t1, &call_builtin);
6485 __ lw(a2, FieldMemOperand(a0, String::kLengthOffset));
6488 __ Movz(v0, a1, a2); // If first is empty, return second (from a1).
6489 __ slt(t4, zero_reg, a2); // if (a2 > 0) t4 = 1.
6494 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6501 __ sra(a2, a2, kSmiTagSize);
6507 // a2: length of first string
6515 __ Addu(t2, a2, Operand(a3));
6531 __ lbu(a2, FieldMemOperand(a0, SeqAsciiString::kHeaderSize));
6538 masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string);
6539 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6544 // are combined into single halfword in a2 register.
6550 __ sh(a2, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
6551 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6585 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6613 // a2: length of first string
6665 // a2: length of first string
6678 // a2: length of first string.
6682 StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, true);
6685 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6694 // a2: length of first string.
6697 StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, false);
6701 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6764 __ Or(a2, a1, a0);
6765 __ JumpIfNotSmi(a2, &miss);
6789 __ And(a2, a1, Operand(a0));
6790 __ JumpIfSmi(a2, &generic_stub);
6792 __ GetObjectType(a0, a2, a2);
6793 __ Branch(&maybe_undefined1, ne, a2, Operand(HEAP_NUMBER_TYPE));
6794 __ GetObjectType(a1, a2, a2);
6795 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
6803 __ Subu(a2, a1, Operand(kHeapObjectTag));
6804 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset));
6805 __ Subu(a2, a0, Operand(kHeapObjectTag));
6806 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset));
6839 __ GetObjectType(a1, a2, a2);
6840 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
6862 Register tmp1 = a2;
6902 Register tmp1 = a2;
6981 __ And(a2, a1, Operand(a0));
6982 __ JumpIfSmi(a2, &miss);
6984 __ GetObjectType(a0, a2, a2);
6985 __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
6986 __ GetObjectType(a1, a2, a2);
6987 __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
7000 __ And(a2, a1, a0);
7001 __ JumpIfSmi(a2, &miss);
7002 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
7004 __ Branch(&miss, ne, a2, Operand(known_map_));
7028 __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
7032 __ Jump(a2);
7160 a2.bit() | a1.bit() | a0.bit() | v0.bit());
7233 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
7247 __ mov(scratch2, a2);
7271 Register index = a2;
7362 { REG(s2), REG(a2), REG(t3), EMIT_REMEMBERED_SET },
7368 { REG(t0), REG(a1), REG(a2), OMIT_REMEMBERED_SET },
7370 { REG(a1), REG(a2), REG(a3), EMIT_REMEMBERED_SET },
7371 { REG(a3), REG(a2), REG(a1), EMIT_REMEMBERED_SET },
7373 { REG(a2), REG(a1), REG(a3), EMIT_REMEMBERED_SET },
7374 { REG(a3), REG(a1), REG(a2), EMIT_REMEMBERED_SET },
7376 { REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
7377 { REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
7381 { REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
7382 { REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
7384 { REG(t2), REG(a2), REG(a0), EMIT_REMEMBERED_SET },
7385 { REG(a2), REG(t2), REG(t5), EMIT_REMEMBERED_SET },
7534 __ li(a2, Operand(ExternalReference::isolate_address()));
7633 // -- a2 : map of array literal
7644 __ CheckFastElements(a2, t1, &double_elements);
7647 __ CheckFastSmiOnlyElements(a2, t1, &fast_elements);
7685 __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, a2,