Lines Matching defs:a2
55 static Register registers[] = { a3, a2, a1 };
66 static Register registers[] = { a3, a2, a1, a0 };
77 static Register registers[] = { a2 };
118 static Register registers[] = { a2, a1, a0 };
158 // a2 -- type info cell with elements kind
159 static Register registers[] = { a1, a2 };
253 static Register registers[] = { a1, a2, a0 };
264 static Register registers[] = { a0, a3, a1, a2 };
324 __ Allocate(JSFunction::kSize, v0, a1, a2, &gc, TAG_OBJECT);
332 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
333 __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
334 __ lw(t1, MemOperand(a2, Context::SlotOffset(map_index)));
373 // a2 holds native context, a1 points to fixed array of 3-element entries
380 __ Branch(&install_optimized, eq, a2, Operand(t1));
394 __ Branch(&loop, ne, a2, Operand(t1));
413 __ lw(t0, ContextOperand(a2, Context::OPTIMIZED_FUNCTIONS_LIST));
418 __ sw(v0, ContextOperand(a2, Context::OPTIMIZED_FUNCTIONS_LIST));
423 a2,
447 __ Allocate(FixedArray::SizeFor(length), v0, a1, a2, &gc, TAG_OBJECT);
454 __ li(a2, Operand(Smi::FromInt(length)));
455 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
459 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
464 __ sw(a2, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
491 __ Allocate(FixedArray::SizeFor(length), v0, a1, a2, &gc, TAG_OBJECT);
500 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex);
501 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
502 __ li(a2, Operand(Smi::FromInt(length)));
503 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
520 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
524 __ sw(a2, ContextOperand(v0, Context::GLOBAL_OBJECT_INDEX));
652 __ Move(a2, a3, f14);
885 // a2: Right value (least significant part of mantissa).
902 __ Move(f14, a2, a3);
929 scratch_.is(a2) &&
933 if (the_int_.is(a2) &&
948 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
949 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
1175 // Get the type of the first operand into a2 and compare it with
1177 __ GetObjectType(lhs, a2, a2);
1178 __ Branch(&first_non_object, less, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
1188 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));
1199 __ Or(a2, a2, Operand(a3));
1200 __ And(at, a2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
1211 __ GetObjectType(lhs, a3, a2);
1212 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE));
1213 __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset));
1215 __ Branch(slow, ne, a3, Operand(a2));
1235 // a2 is object type of rhs.
1238 __ And(at, a2, Operand(kIsNotStringMask));
1240 __ And(at, a2, Operand(kIsNotInternalizedMask));
1253 __ Branch(not_both_strings, lt, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
1254 __ GetObjectType(rhs, a2, a3);
1261 __ lbu(a2, FieldMemOperand(a2, Map::kBitFieldOffset));
1263 __ and_(a0, a2, a3);
1360 GenerateLookupNumberStringCache(masm, a1, v0, a2, a3, t0, &runtime);
1388 // On entry a1 and a2 are the values to be compared.
1397 ICCompareStub_CheckInputType(masm, lhs, a2, left_, &miss);
1404 __ Or(a2, a1, a0);
1405 __ JumpIfNotSmi(a2, &not_two_smis);
1432 // or in GP registers (a0, a1, a2, a3) depending on the presence of the FPU.
1438 // and the right hand side if we have FPU. Otherwise a2, a3 represent
1492 // In this case a2 will contain the type of lhs_.
1505 // Assumes that a2 is the type of lhs_ on entry.
1514 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, a2, a3, &slow);
1516 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3);
1521 a2,
1528 a2,
1602 __ li(a2, Operand(Smi::FromInt(MinorKey())));
1603 __ push(a2);
1819 // Load left and right operands into f12 and f14 or a0/a1 and a2/a3
1835 // Load right operand to f14 or a2/a3.
1838 masm, right, destination, f14, f16, a2, a3, heap_number_map,
1843 masm, destination, right, f14, a2, a3, heap_number_map,
1908 __ SmiUntag(a2, right);
1910 // Convert operands to 32-bit integers. Right in a2 and left in a3.
1922 a2,
1933 __ Or(a2, a3, Operand(a2));
1936 __ Xor(a2, a3, Operand(a2));
1939 __ And(a2, a3, Operand(a2));
1943 __ GetLeastBitsFromInt32(a2, a2, 5);
1944 __ srav(a2, a3, a2);
1948 __ GetLeastBitsFromInt32(a2, a2, 5);
1949 __ srlv(a2, a3, a2);
1954 __ Branch(&result_not_a_smi, lt, a2, Operand(zero_reg));
1958 __ GetLeastBitsFromInt32(a2, a2, 5);
1959 __ sllv(a2, a3, a2);
1965 __ Addu(a3, a2, Operand(0x40000000));
1968 __ SmiTag(v0, a2);
1982 // a2: Answer as signed int32.
1988 // Convert the int32 in a2 to the heap number in a0. As
1990 __ mtc1(a2, f0);
2098 __ GetObjectType(left, a2, a2);
2099 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
2103 __ GetObjectType(right, a2, a2);
2104 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
2171 a2,
2310 // Convert operands to 32-bit integers. Right in a2 and left in a3. The
2325 a2,
2338 __ Or(a2, a3, Operand(a2));
2341 __ Xor(a2, a3, Operand(a2));
2344 __ And(a2, a3, Operand(a2));
2347 __ And(a2, a2, Operand(0x1f));
2348 __ srav(a2, a3, a2);
2351 __ And(a2, a2, Operand(0x1f));
2352 __ srlv(a2, a3, a2);
2354 // We only get a negative result if the shift value (a2) is 0.
2361 a2,
2365 __ And(a2, a2, Operand(0x1f));
2366 __ sllv(a2, a3, a2);
2373 __ Addu(scratch1, a2, Operand(0x40000000));
2378 __ SmiTag(v0, a2);
2392 __ mtc1(a2, double_scratch);
2396 __ mtc1(a2, double_scratch);
2521 __ GetObjectType(left, a2, a2);
2522 __ Branch(&left_not_string, ge, a2, Operand(FIRST_NONSTRING_TYPE));
2532 __ GetObjectType(right, a2, a2);
2533 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
2605 // of the double into a2, a3.
2609 __ Move(a2, a3, f4);
2620 // low and high words into a2, a3.
2621 __ lw(a2, FieldMemOperand(a0, HeapNumber::kValueOffset));
2625 __ Move(a2, a3, f4);
2628 // a2 = low 32 bits of double value.
2632 __ Xor(a1, a2, a3);
2640 // a2 = low 32 bits of double value.
2678 __ Branch(&calculate, ne, a2, Operand(t0));
2709 // a2 and a3: parts of the double value.
2710 // Store a0, a2 and a3 on stack for later before calling C function.
2711 __ Push(a3, a2, cache_entry);
2717 __ Pop(a3, a2, cache_entry);
2722 __ sw(a2, MemOperand(cache_entry, 0 * kPointerSize));
2831 const Register exponent = a2;
3144 __ li(a2, Operand(ExternalReference::isolate_address(isolate)));
3180 // It's okay to clobber a2 and a3 here. v0 & v1 contain result.
3181 __ li(a2, Operand(scope_depth));
3182 __ lw(a3, MemOperand(a2));
3184 __ sw(a3, MemOperand(a2));
3190 __ addiu(a2, v0, 1);
3191 __ andi(t0, a2, kFailureTagMask);
3256 // NOTE: s0-s2 hold the arguments of this function instead of a0-a2.
3308 __ li(a2, Operand(external_caught));
3309 __ sw(a0, MemOperand(a2));
3317 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
3319 __ sw(v0, MemOperand(a2));
3337 // a2: receiver
3377 // a2: receiver_pointer
3444 // a2: receiver_pointer
3523 const Register scratch = a2;
3578 // Get prototype of object into a2.
3696 // -- a2 : name
3726 // -- a2 : name
3757 // -- a2 : receiver
3761 receiver = a2;
3769 // -- a2 : key
3832 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3833 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3855 __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3861 __ Addu(a3, a2, Operand(t3));
3880 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
3883 a2,
3887 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
3888 __ sw(a2, MemOperand(sp, 0 * kPointerSize));
3889 __ sll(t3, a2, 1);
3915 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
3918 a2,
3922 __ mov(a2, a1);
3928 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
3929 __ sll(t6, a2, 1);
3935 // a2 = argument count (tagged)
3936 // Compute the mapped parameter count = min(a1, a2) in a1.
3938 __ Branch(&skip_min, lt, a1, Operand(a2));
3939 __ mov(a1, a2);
3958 __ sll(t6, a2, 1);
3969 // a2 = argument count (tagged)
3989 // a2 = argument count (tagged)
4008 __ sw(a2, FieldMemOperand(v0, kLengthOffset));
4018 // a2 = argument count (tagged)
4081 // a2 = argument count (tagged)
4087 __ sw(a2, FieldMemOperand(a3, FixedArray::kLengthOffset));
4105 __ Branch(&arguments_loop, lt, t5, Operand(a2));
4111 // a2 = argument count (tagged)
4113 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count.
4124 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4125 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4137 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4140 __ Addu(a3, a2, Operand(at));
4157 __ Allocate(a1, v0, a2, a3, &runtime,
4179 __ lw(a2, MemOperand(sp, 1 * kPointerSize));
4196 // Pre-decrement a2 with kPointerSize on each iteration.
4198 __ Addu(a2, a2, Operand(-kPointerSize));
4199 __ lw(a3, MemOperand(a2));
4287 __ lw(a2,
4291 // Multiplying by 2 comes for free since a2 is smi-tagged.
4296 &runtime, hi, a2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2));
4401 1, a0, a2);
4434 __ li(a2, Operand(address_of_regexp_stack_memory_size));
4435 __ lw(a2, MemOperand(a2, 0));
4436 __ addu(a0, a0, a2);
4460 // Argument 3, a2: Start of string data
4465 __ addu(a2, t0, t1);
4502 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
4504 __ lw(v0, MemOperand(a2, 0));
4507 __ sw(a1, MemOperand(a2, 0)); // Clear pending exception.
4536 __ GetObjectType(a0, a2, a2);
4537 __ Branch(&runtime, ne, a2, Operand(JS_ARRAY_TYPE));
4548 __ Addu(a2, a1, Operand(RegExpImpl::kLastMatchOverhead));
4550 __ Branch(&runtime, gt, a2, Operand(at));
4555 __ sll(a2, a1, kSmiTagSize + kSmiShiftSize); // To smi.
4556 __ sw(a2, FieldMemOperand(last_match_info_elements,
4562 __ mov(a2, subject);
4569 __ mov(subject, a2);
4583 __ li(a2, Operand(address_of_static_offsets_vector));
4586 // a2: offsets vector
4597 __ lw(a3, MemOperand(a2, 0));
4598 __ addiu(a2, a2, kPointerSize);
4677 __ Addu(a2, t1, Operand(objects_size));
4679 a2, // In: Size, in words.
4693 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4696 __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
4698 __ lw(a2, ContextOperand(a2, Context::REGEXP_RESULT_MAP_INDEX));
4700 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
4704 __ lw(a2, MemOperand(sp, kPointerSize * 1));
4707 __ sw(a2, FieldMemOperand(v0, JSRegExpResult::kIndexOffset));
4716 __ li(a2, Operand(masm->isolate()->factory()->fixed_array_map()));
4717 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
4722 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4726 // a2: undefined.
4734 __ sw(a2, MemOperand(a3));
4751 // a2 : cache cell for call target
4760 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
4792 __ sw(at, FieldMemOperand(a2, Cell::kValueOffset));
4809 1 << 6; // a2
4823 __ sw(a1, FieldMemOperand(a2, Cell::kValueOffset));
4832 // a2 : cache cell for call target
4895 __ sw(at, FieldMemOperand(a2, Cell::kValueOffset));
4901 __ li(a2, Operand(0, RelocInfo::NONE32));
4915 __ mov(a2, zero_reg);
4926 // a2 : cache cell for call target
4960 __ li(a2, Operand(0, RelocInfo::NONE32));
5468 __ lw(a2, MemOperand(sp, kToOffset));
5476 __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
5478 // Both a2 and a3 are untagged integers.
5482 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
5483 __ Subu(a2, a2, a3);
5495 __ Branch(&single_char, eq, a2, Operand(1));
5500 // a2: result string length
5504 __ Branch(&return_v0, eq, a2, Operand(t0));
5506 __ Branch(&runtime, hi, a2, Operand(t0));
5513 // a2: length
5555 // a2: length
5558 __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
5569 __ AllocateAsciiSlicedString(v0, a2, t2, t3, &runtime);
5572 __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
5584 // a2: length
5613 __ AllocateAsciiString(v0, a2, t0, t2, t3, &runtime);
5623 // a2: result string length
5627 masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
5632 __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
5643 // a2: result length.
5647 masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
5661 // a2: length
5665 v0, a3, a2, v0, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
5807 __ IncrementCounter(counters->string_compare_native(), 1, a1, a2);
5813 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime);
5816 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3);
5818 GenerateCompareFlatAsciiStrings(masm, a1, a0, a2, a3, t0, t1);
5859 masm, 1 * kPointerSize, a0, a2, a3, t0, t1, &call_builtin);
5864 masm, 0 * kPointerSize, a1, a2, a3, t0, t1, &call_builtin);
5880 __ lw(a2, FieldMemOperand(a0, String::kLengthOffset));
5883 __ Movz(v0, a1, a2); // If first is empty, return second (from a1).
5884 __ slt(t4, zero_reg, a2); // if (a2 > 0) t4 = 1.
5889 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
5896 __ sra(a2, a2, kSmiTagSize);
5902 // a2: length of first string
5910 __ Addu(t2, a2, Operand(a3));
5926 __ lbu(a2, FieldMemOperand(a0, SeqOneByteString::kHeaderSize));
5933 masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string);
5934 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
5939 // are combined into single halfword in a2 register.
5945 __ sh(a2, FieldMemOperand(v0, SeqOneByteString::kHeaderSize));
5946 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6008 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6036 // a2: length of first string
6088 // a2: length of first string
6101 // a2: length of first string.
6105 StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, true);
6108 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6117 // a2: length of first string.
6120 StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, false);
6124 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6220 __ Or(a2, a1, a0);
6221 __ JumpIfNotSmi(a2, &miss);
6259 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
6261 __ Subu(a2, a0, Operand(kHeapObjectTag));
6262 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset));
6265 __ SmiUntag(a2, a0); // Can't clobber a0 yet.
6267 __ mtc1(a2, single_scratch);
6272 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
6274 __ Subu(a2, a1, Operand(kHeapObjectTag));
6275 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset));
6278 __ SmiUntag(a2, a1); // Can't clobber a1 yet.
6280 __ mtc1(a2, single_scratch);
6317 __ GetObjectType(a1, a2, a2);
6318 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
6340 Register tmp1 = a2;
6381 Register tmp1 = a2;
6426 Register tmp1 = a2;
6506 __ And(a2, a1, Operand(a0));
6507 __ JumpIfSmi(a2, &miss);
6509 __ GetObjectType(a0, a2, a2);
6510 __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
6511 __ GetObjectType(a1, a2, a2);
6512 __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
6525 __ And(a2, a1, a0);
6526 __ JumpIfSmi(a2, &miss);
6527 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
6529 __ Branch(&miss, ne, a2, Operand(known_map_));
6554 __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
6558 __ Jump(a2);
6675 a2.bit() | a1.bit() | a0.bit() | v0.bit());
6747 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
6761 __ mov(scratch2, a2);
6785 Register index = a2;
6881 { REG(t0), REG(a1), REG(a2), OMIT_REMEMBERED_SET },
6883 { REG(a1), REG(a2), REG(a3), EMIT_REMEMBERED_SET },
6884 { REG(a3), REG(a2), REG(a1), EMIT_REMEMBERED_SET },
6886 { REG(a2), REG(a1), REG(a3), EMIT_REMEMBERED_SET },
6887 { REG(a3), REG(a1), REG(a2), EMIT_REMEMBERED_SET },
6889 { REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
6890 { REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
6894 { REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
6895 { REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
6897 { REG(t2), REG(a2), REG(a0), EMIT_REMEMBERED_SET },
6898 { REG(a2), REG(t2), REG(t5), EMIT_REMEMBERED_SET },
6902 { REG(a2), REG(t0), REG(a1), EMIT_REMEMBERED_SET },
7051 __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate())));
7162 // clobbers a1, a2, t0
7174 __ lw(a2, FieldMemOperand(a1, JSObject::kMapOffset));
7176 __ CheckFastElements(a2, t1, &double_elements);
7179 __ CheckFastSmiElements(a2, t1, &fast_elements);
7219 t1, t2, t3, t5, a2,
7329 // a2 - type info cell
7354 __ Branch(&normal_sequence, eq, a2, Operand(at));
7355 __ lw(t1, FieldMemOperand(a2, Cell::kValueOffset));
7362 __ lw(t1, FieldMemOperand(a2, Cell::kValueOffset));
7428 // -- a2 : type info cell
7446 // We should either have undefined in a2 or a valid cell.
7450 __ Branch(&okay_here, eq, a2, Operand(at));
7451 __ lw(a3, FieldMemOperand(a2, 0));
7460 __ Branch(&no_info, eq, a2, Operand(at));
7461 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));