
Lines Matching refs:a3

77   static Register registers[] = { a3, a2, a1 };
88 static Register registers[] = { a3, a2, a1, a0 };
323 static Register registers[] = { a0, a3, a1, a2 };
392 __ lw(a3, MemOperand(sp, 0));
403 __ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX)));
436 __ lw(a3, MemOperand(sp, 0));
452 __ JumpIfNotSmi(a3, &after_sentinel);
454 __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg));
456 __ lw(a3, GlobalObjectOperand());
457 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
458 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX));
463 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX));
719 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
720 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
961 __ GetObjectType(rhs, a3, a3);
962 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
965 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));
970 __ Or(a2, a2, Operand(a3));
982 __ GetObjectType(lhs, a3, a2);
986 __ Branch(slow, ne, a3, Operand(a2));
1013 __ GetObjectType(rhs, a3, a3);
1014 __ Branch(not_both_strings, ge, a3, Operand(FIRST_NONSTRING_TYPE));
1015 __ And(at, a3, Operand(kIsNotInternalizedMask));
1025 __ GetObjectType(rhs, a2, a3);
1026 __ Branch(not_both_strings, lt, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1031 __ lw(a3, FieldMemOperand(lhs, HeapObject::kMapOffset));
1033 __ lbu(a3, FieldMemOperand(a3, Map::kBitFieldOffset));
1034 __ and_(a0, a2, a3);
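
The Or at 970 and the and_ at 1034 use the same folding trick: combine the two operands' type or bit-field bytes so that a single masked test answers the question for both at once. A minimal C++ sketch of the idea, with illustrative mask values rather than V8's real constants:

    #include <cstdint>

    // Illustrative masks, not V8's actual constants.
    constexpr uint32_t kIsNotStringMask       = 0x80;  // set => not a string
    constexpr uint32_t kIsNotInternalizedMask = 0x40;  // set => not internalized

    bool BothInternalizedStrings(uint32_t lhs_type, uint32_t rhs_type) {
      // A disqualifying bit set in either byte survives the OR, so one
      // masked compare covers both sides at once.
      uint32_t combined = lhs_type | rhs_type;
      return (combined & (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
    }
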
1070 ICCompareStub_CheckInputType(masm, rhs, a3, right_, &miss);
1104 // or in GP registers (a0, a1, a2, a3) depending on the presence of the FPU.
1110 // and the right hand side if we have FPU. Otherwise a2, a3 represent the right hand side.
1186 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, a2, a3, &slow);
1188 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3);
1194 a3,
1201 a3,
1297 // Get the low and high words of the double into a2, a3.
1301 __ Move(a2, a3, f4);
1312 // Load the double's low and high words into a2, a3.
1314 __ lw(a3, FieldMemOperand(a0, HeapNumber::kValueOffset + 4));
1317 __ Move(a2, a3, f4);
1321 // a3 = high 32 bits of double value.
1324 __ Xor(a1, a2, a3);
1333 // a3 = high 32 bits of double value.
1371 __ Branch(&calculate, ne, a3, Operand(t1));
1401 // a2 and a3: parts of the double value.
1402 // Store a0, a2 and a3 on stack for later before calling C function.
1403 __ Push(a3, a2, cache_entry);
1409 __ Pop(a3, a2, cache_entry);
1415 __ sw(a3, MemOperand(cache_entry, 1 * kPointerSize));
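
Lines 1297-1415 are the transcendental cache lookup: the double's two 32-bit halves (a2/a3) are hashed into a cache index, the entry's key words are compared against the input, and on a miss the result of the C call is written back into the entry. A minimal C++ sketch of that scheme; the cache size and the sin operation are stand-ins:

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    // Direct-mapped cache keyed on the raw bits of the input double,
    // mirroring the low/high word split into a2/a3 above.
    struct Entry { uint32_t lo, hi; double result; };
    static Entry cache[512];  // hypothetical size

    double CachedSin(double x) {
      uint64_t bits;
      std::memcpy(&bits, &x, sizeof bits);
      uint32_t lo = static_cast<uint32_t>(bits);
      uint32_t hi = static_cast<uint32_t>(bits >> 32);
      uint32_t index = (lo ^ hi) & 511;  // hash both halves, mask to size
      Entry& e = cache[index];
      if (e.lo == lo && e.hi == hi) return e.result;  // hit: skip the C call
      e = {lo, hi, std::sin(x)};                      // miss: compute and fill
      return e.result;
    }
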
1854 // It's okay to clobber a2 and a3 here. v0 & v1 contain result.
1856 __ lw(a3, MemOperand(a2));
1857 __ Subu(a3, a3, Operand(1));
1858 __ sw(a3, MemOperand(a2));
1896 __ li(a3, Operand(isolate->factory()->the_hole_value()));
1899 __ sw(a3, MemOperand(t0));
2012 // a3: argc
2052 // a3: argc
2119 // a3: argc
2193 Register map = a3; // Map of the object.
2251 // Register mapping: a3 is object map and t0 is function prototype.
2378 StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, a3, t0, &miss);
2408 StubCompiler::GenerateLoadStringLength(masm, receiver, a3, t0, &miss);
2447 Register scratch = a3;
2506 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
2509 a3,
2518 __ subu(a3, a0, a1);
2519 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize);
2520 __ Addu(a3, fp, Operand(t3));
2522 __ lw(v0, MemOperand(a3, kDisplacement));
2532 __ subu(a3, a0, a1);
2533 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize);
2534 __ Addu(a3, a2, Operand(t3));
2536 __ lw(v0, MemOperand(a3, kDisplacement));
2552 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2553 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
2560 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
2563 __ Addu(a3, a3, Operand(t3));
2564 __ addiu(a3, a3, StandardFrameConstants::kCallerSPOffset);
2565 __ sw(a3, MemOperand(sp, 1 * kPointerSize));
2587 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2588 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
2601 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
2603 __ Addu(a3, a3, Operand(t6));
2604 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
2605 __ sw(a3, MemOperand(sp, 1 * kPointerSize));
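
Both stubs above (2506-2536 and 2552-2605) repeat one pattern: inspect the caller frame's context slot, and if it holds the arguments-adaptor marker, read the real argument count from that frame instead of using the formal parameter count. A rough C++ sketch under assumed names and layout; V8's actual frame constants and marker value differ:

    #include <cstdint>

    // Hypothetical frame layout for illustration only.
    struct Frame {
      Frame* caller_fp;
      uintptr_t context_or_marker;  // adaptor frames store a sentinel here
      uintptr_t length;             // actual argc as a smi (adaptor frames)
    };

    constexpr uintptr_t kAdaptorMarker = 4;  // stand-in sentinel value

    uintptr_t EffectiveArgc(Frame* fp, uintptr_t formal_argc) {
      Frame* caller = fp->caller_fp;
      if (caller->context_or_marker == kAdaptorMarker) {
        return caller->length >> 1;  // untag the smi count the adaptor stored
      }
      return formal_argc;            // no adaptor: formal count is accurate
    }
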
2639 __ Allocate(t5, v0, a3, t0, &runtime, TAG_OBJECT);
2666 __ lw(a3, FieldMemOperand(t0, i));
2667 __ sw(a3, FieldMemOperand(v0, i));
2672 __ lw(a3, MemOperand(sp, 2 * kPointerSize));
2675 __ sw(a3, FieldMemOperand(v0, kCalleeOffset));
2697 // Move backing store address to a3, because it is expected there when filling in the unmapped arguments.
2699 __ mov(a3, t0);
2729 __ Addu(a3, t0, Operand(t6));
2730 __ Addu(a3, a3, Operand(kParameterMapHeaderSize));
2734 // a3 = address of backing store (tagged)
2747 __ Addu(t6, a3, t1);
2755 // a3 = address of backing store (tagged)
2759 __ sw(t1, FieldMemOperand(a3, FixedArray::kMapOffset));
2760 __ sw(a2, FieldMemOperand(a3, FixedArray::kLengthOffset));
2773 __ Addu(t1, a3, Operand(t6));
2798 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
2801 a3,
2813 __ Addu(a3, a2, Operand(at));
2815 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
2816 __ sw(a3, MemOperand(sp, 1 * kPointerSize));
2830 __ Allocate(a1, v0, a2, a3, &runtime,
2840 __ CopyFields(v0, t0, a3.bit(), JSObject::kHeaderSize / kPointerSize);
2858 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex);
2859 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset));
2872 __ lw(a3, MemOperand(a2));
2874 __ sw(a3, MemOperand(t0));
2975 __ mov(a3, subject); // Make a copy of the original subject string.
2979 // a3: subject string
3041 // a3: original subject string
3042 // Load previous index and check range before a3 is overwritten. We have to
3043 // use a3 instead of subject here because subject might have been only made to look like a sequential string when it actually is an external string.
3047 __ lw(a3, FieldMemOperand(a3, String::kLengthOffset));
3048 __ Branch(&runtime, ls, a3, Operand(a1));
3056 __ sra(a3, a0, 2); // a3 is 1 for ASCII, 0 for UC16 (used below).
3068 // a3: encoding of subject string (1 if ASCII, 0 if two_byte);
3125 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte.
3132 // Argument 4, a3: End of string data
3135 __ sllv(t1, t0, a3);
3137 __ sllv(t1, a1, a3);
3142 __ sllv(t1, t2, a3);
3143 __ addu(a3, t0, t1);
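
Lines 3056-3143 reduce the string encoding to a shift amount (after the Xor at 3125, 0 for one-byte and 1 for two-byte data), then form character addresses as data + (index << shift) with sllv/addu. The same calculation in C++:

    #include <cstddef>
    #include <cstdint>

    // shift is 0 for one-byte strings, 1 for two-byte strings.
    inline const uint8_t* CharAddress(const uint8_t* data,
                                      size_t index, int shift) {
      return data + (index << shift);  // byte offset scales with char width
    }
    // Argument 4 (end of string data) = CharAddress(data, length, shift);
    // the match start is CharAddress(data, previous_index, shift).
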
3270 __ lw(a3, MemOperand(a2, 0));
3273 __ sll(a3, a3, kSmiTagSize); // Convert to Smi.
3274 __ sw(a3, MemOperand(a0, 0));
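
The sll at 3273 is smi tagging: on 32-bit V8 a small integer lives in the upper 31 bits of a word and the low tag bit is zero (kSmiTagSize == 1, kSmiTag == 0). The basic operations, sketched in C++:

    #include <cstdint>

    // Valid for values that fit in 31 bits, as smis do on 32-bit targets.
    inline int32_t SmiTag(int32_t value) { return value << 1; }
    inline int32_t SmiUntag(int32_t smi) { return smi >> 1; }
    inline bool IsSmi(int32_t tagged)    { return (tagged & 1) == 0; }
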
3354 a3, // Scratch register.
3367 __ Addu(a3, v0, Operand(JSRegExpResult::kSize));
3370 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
3385 // a3: FixedArray, tagged.
3390 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
3393 __ sw(t2, FieldMemOperand(a3, FixedArray::kLengthOffset));
3396 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3400 // a3: Start of elements in FixedArray.
3404 __ addu(t1, t1, a3); // Point past last element to store.
3406 __ Branch(&done, ge, a3, Operand(t1)); // Break when a3 past end of elem.
3407 __ sw(a2, MemOperand(a3));
3409 __ addiu(a3, a3, kPointerSize); // In branch delay slot.
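
Lines 3396-3409 are the stub's fill loop: a3 starts at the first element slot, t1 points just past the last one, and each iteration stores the undefined sentinel, with the addiu increment sitting in the MIPS branch delay slot so it executes together with the backward branch. As a plain C++ loop over pointer-sized slots:

    #include <cstdint>

    void FillWithValue(uintptr_t* start, uintptr_t* past_end, uintptr_t value) {
      for (uintptr_t* p = start; p < past_end; ++p) {
        *p = value;  // e.g. the undefined sentinel held in a2 above
      }
    }
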
3433 // Load the cache state into a3.
3434 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
3438 __ Branch(&done, eq, a3, Operand(a1));
3443 // AllocationSite. Do a map check on the object in a3.
3444 __ lw(t1, FieldMemOperand(a3, 0));
3449 __ LoadArrayFunction(a3);
3450 __ Branch(&megamorphic, ne, a1, Operand(a3));
3458 __ Branch(&initialize, eq, a3, Operand(at));
3470 __ LoadArrayFunction(a3);
3471 __ Branch(&not_array_function, ne, a1, Operand(a3));
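
Lines 3433-3471 maintain per-call-site type feedback in a cell: an uninitialized sentinel is replaced by the first callee seen (monomorphic), and any later mismatch collapses the state to megamorphic. A simplified C++ sketch of that state machine; the sentinel objects and names are stand-ins:

    // The cell holds either a sentinel or the one function seen so far.
    struct Cell { void* value; };

    void RecordCall(Cell* cell, void* function,
                    void* uninitialized_sentinel, void* megamorphic_sentinel) {
      if (cell->value == function) return;  // still monomorphic: nothing to do
      if (cell->value == uninitialized_sentinel) {
        cell->value = function;             // first call: go monomorphic
        return;
      }
      cell->value = megamorphic_sentinel;   // second target seen: give up
    }
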
3519 __ lw(a3,
3521 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalReceiverOffset));
3522 __ sw(a3, MemOperand(sp, argc_ * kPointerSize));
3530 __ GetObjectType(a1, a3, a3);
3531 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));
3570 __ Branch(&non_function, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
3574 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
3588 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
3604 __ GetObjectType(a1, a3, a3);
3605 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));
3612 Register jmp_reg = a3;
3621 // a3: object type
3624 __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
3625 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
3629 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
4141 __ lw(a3, MemOperand(sp, kFromOffset));
4149 __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
4150 // Both a2 and a3 are untagged integers.
4152 __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0.
4154 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
4155 __ Subu(a2, a2, a3);
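
Lines 4141-4155 are the usual substring bounds checks, after which a2 holds the result length. Roughly, in C++ (the function and parameter names are mine):

    // Returns false where the stub bails out to the runtime.
    bool SubStringBoundsOk(int from, int to) {
      if (from < 0) return false;   // From < 0.
      if (from > to) return false;  // Fail if from > to.
      return true;                  // result length is then to - from
    }
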
4186 // a3: from index (untagged)
4211 __ Addu(a3, a3, t0);
4228 // a3: adjusted start index (untagged)
4246 __ sll(a3, a3, 1);
4248 __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
4257 // a3: adjusted start index (untagged)
4288 __ Addu(t1, t1, a3);
4299 masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
4308 __ sll(t0, a3, 1);
4319 masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
4323 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
4334 // a3: from index (untagged)
4335 __ SmiTag(a3, a3);
4337 v0, a3, a2, v0, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
4485 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime);
4488 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3);
4490 GenerateCompareFlatAsciiStrings(masm, a1, a0, a2, a3, t0, t1);
4531 masm, 1 * kPointerSize, a0, a2, a3, t0, t1, &call_builtin);
4536 masm, 0 * kPointerSize, a1, a2, a3, t0, t1, &call_builtin);
4553 __ lw(a3, FieldMemOperand(a1, String::kLengthOffset));
4557 __ slt(t5, zero_reg, a3); // if (a3 > 0) t5 = 1.
4561 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
4569 __ sra(a3, a3, kSmiTagSize);
4575 // a3: length of second string
4582 __ Addu(t2, a2, Operand(a3));
4599 __ lbu(a3, FieldMemOperand(a1, SeqOneByteString::kHeaderSize));
4605 masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string);
4606 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
4618 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
4680 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
4709 // a3: length of second string
4761 // a3: length of second string
4774 // a3: length of second string.
4779 StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, true);
4780 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
4790 // a3: length of second string.
4794 StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, false);
4796 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
4970 Register tmp2 = a3;
5011 Register tmp2 = a3;
5056 Register tmp2 = a3;
5157 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
5159 __ Branch(&miss, ne, a3, Operand(known_map_));
5287 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
5360 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
5399 Register mask = a3;
5694 // -- a3 : element index as smi
5720 __ Push(a1, a3, a0);
5729 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
5743 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
5752 __ StoreNumberToDoubleElements(a0, a3, t1, t3, t5, a2, &slow_elements);
5876 __ Branch(&next, ne, a3, Operand(kind));
5893 // a3 - kind (if mode != DISABLE_ALLOCATION_SITES)
5907 __ And(at, a3, Operand(1));
5932 __ Addu(a3, a3, Operand(1));
5942 // Save the resulting elements kind in type info. We can't just store a3 there directly.
5957 __ Branch(&next, ne, a3, Operand(kind));
6057 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
6059 __ SmiTst(a3, at);
6062 __ GetObjectType(a3, a3, t0);
6071 __ lw(a3, FieldMemOperand(a2, 0));
6073 a3, Operand(cell_map));
6081 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
6085 __ lw(t0, FieldMemOperand(a3, 0));
6089 __ lw(a3, FieldMemOperand(a3, AllocationSite::kTransitionInfoOffset));
6090 __ SmiUntag(a3);
6092 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
6146 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
6148 __ SmiTst(a3, at);
6151 __ GetObjectType(a3, a3, t0);
6157 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
6159 // Load the map's "bit field 2" into a3. We only need the first byte, but the following bit field extraction takes care of that anyway.
6161 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
6163 __ Ext(a3, a3, Map::kElementsKindShift, Map::kElementsKindBitCount);
6167 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
6170 a3, Operand(FAST_HOLEY_ELEMENTS));
6175 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS));
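
The Ext at 6163 pulls the elements-kind field out of the map's bit_field2 byte, and 6167-6175 dispatch on the decoded kind. What ext does, as plain C++ (the shift/width parameters stand in for Map::kElementsKindShift and Map::kElementsKindBitCount):

    #include <cstdint>

    // MIPS ext semantics: extract `count` bits starting at bit `shift`.
    inline uint32_t ExtractBits(uint32_t word, int shift, int count) {
      return (word >> shift) & ((1u << count) - 1);
    }
    // elements kind = ExtractBits(bit_field2, shift, count);
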