Lines Matching defs:a0
70 // The ToNumber stub takes one argument in a0.
72 __ JumpIfNotSmi(a0, &check_heap_number);
74 __ mov(v0, a0);
77 EmitCheckForHeapNumber(masm, a0, a1, t0, &call_builtin);
79 __ mov(v0, a0);
82 __ push(a0);
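
The ToNumber fast path above (lines 70-82) hinges on V8's 32-bit smi encoding: a smi carries its payload in the upper 31 bits and a zero tag bit, so JumpIfNotSmi is a single bit test. A minimal standalone sketch of that encoding, assuming kSmiTag == 0 and kSmiTagSize == 1 as on 32-bit MIPS:

    #include <cassert>
    #include <cstdint>

    constexpr intptr_t kSmiTagMask = 1;  // low bit: 0 = smi, 1 = heap object

    bool IsSmi(intptr_t value) { return (value & kSmiTagMask) == 0; }
    intptr_t TagSmi(int32_t n) { return static_cast<intptr_t>(n) << 1; }  // payload must fit 31 bits
    int32_t UntagSmi(intptr_t v) { return static_cast<int32_t>(v >> 1); }

    int main() {
      assert(IsSmi(TagSmi(42)) && UntagSmi(TagSmi(42)) == 42);
    }
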
311 __ lw(a0, MemOperand(sp, 1 * kPointerSize));
313 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
388 __ lw(a0, MemOperand(sp, 2 * kPointerSize));
390 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
399 __ lw(a0, FieldMemOperand(a3, HeapObject::kMapOffset));
400 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceSizeOffset));
401 __ Branch(&slow_case, ne, a0, Operand(size >> kPointerSizeLog2));
524 __ sra(scratch1, a0, kSmiTagSize);
532 __ Move(a0, a1, f12);
536 // Write Smi from a0 to a3 and a2 in double format.
537 __ mov(scratch1, a0);
541 // Write Smi from a1 to a1 and a0 in double format.
543 ConvertToDoubleStub stub2(a1, a0, scratch1, scratch2);
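
ConvertToDoubleStub (lines 536-543) turns a smi into the two 32-bit halves of an IEEE-754 double, since soft-float code keeps doubles in GP register pairs. A sketch of the result it computes, using a host conversion rather than the stub's manual exponent construction:

    #include <cstdint>
    #include <cstring>

    // Splits double(value) into the high word (sign/exponent/mantissa MSBs)
    // and low word (mantissa LSBs), the layout left in the two destination
    // registers.
    void SmiToDoubleWords(int32_t value, uint32_t* hi, uint32_t* lo) {
      double d = static_cast<double>(value);
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);
      *hi = static_cast<uint32_t>(bits >> 32);
      *lo = static_cast<uint32_t>(bits);
    }
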
558 // Load right operand (a0) to f12 or a2/a3.
560 a0, f14, a2, a3, heap_number_map, scratch1, scratch2, slow);
562 // Load left operand (a1) to f14 or a0/a1.
564 a1, f12, a0, a1, heap_number_map, scratch1, scratch2, slow);
985 // a0: Left value (least significant part of mantissa).
1000 // function call are prepared in a0-a3 registers, but the function we are
1003 // a0-a3 registers to f12/f14 register pairs.
1004 __ Move(f12, a0, a1);
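
The Move(f12, a0, a1) above is the soft-float-to-hard-float glue the comment describes: under the MIPS o32 soft-float convention a double argument arrives as two words in a0/a1, while a hard-float callee expects it in f12. A sketch of the reassembly, assuming little-endian word order (low word in a0):

    #include <cstdint>
    #include <cstring>

    double WordsToDouble(uint32_t lo, uint32_t hi) {
      uint64_t bits = (static_cast<uint64_t>(hi) << 32) | lo;
      double d;
      std::memcpy(&d, &bits, sizeof d);
      return d;
    }
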
1040 sign_.is(a0)) {
1052 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
1120 __ Branch(&not_identical, ne, a0, Operand(a1));
1132 __ GetObjectType(a0, t4, t4);
1135 __ GetObjectType(a0, t4, t4);
1146 __ Branch(&return_equal, ne, a0, Operand(t2));
1183 __ lw(t2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
1192 __ lw(t3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
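
The exponent and mantissa loads at lines 1183-1192 feed the identical-operands NaN check: even when lhs and rhs are the same heap number, the comparison must report "not equal" if the value is NaN. A double is NaN exactly when its exponent bits are all ones and its mantissa is non-zero; a sketch over the two stored words:

    #include <cstdint>

    bool IsNaNWords(uint32_t hi, uint32_t lo) {
      uint32_t exponent = (hi >> 20) & 0x7FF;  // 11 exponent bits
      uint64_t mantissa = (static_cast<uint64_t>(hi & 0xFFFFF) << 32) | lo;
      return exponent == 0x7FF && mantissa != 0;
    }
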
1222 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
1223 (lhs.is(a1) && rhs.is(a0)));
1254 // Write Smi from rhs to a1 and a0 in double format. t5 is scratch.
1256 ConvertToDoubleStub stub1(a1, a0, t6, t5);
1295 // Load rhs to a double in a1, a0.
1296 if (rhs.is(a0)) {
1298 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1300 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1317 __ mov(t0, a0); // a0 has LS 32 bits of rhs.
1384 __ mov(t0, a0); // a0 has LS 32 bits of rhs.
1420 // function call are prepared in a0-a3 registers, but the function we are
1423 // a0-a3 registers to f12/f14 register pairs.
1424 __ Move(f12, a0, a1);
1515 if (rhs.is(a0)) {
1517 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1519 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1533 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
1534 (lhs.is(a1) && rhs.is(a0)));
1565 __ and_(a0, a2, a3);
1566 __ And(a0, a0, Operand(1 << Map::kIsUndetectable));
1568 __ xori(v0, a0, 1 << Map::kIsUndetectable);
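
Lines 1565-1568 implement a branch-free "both undetectable" test: AND the two maps' bit fields, isolate Map::kIsUndetectable, and XOR with the mask so that v0 becomes 0 (the "equal" result) only when both objects carry the bit. A sketch with an illustrative bit position, since the real value of Map::kIsUndetectable does not appear in this listing:

    #include <cstdint>

    constexpr uint32_t kIsUndetectableMask = 1u << 4;  // position assumed

    // Returns 0 iff both bit fields have the undetectable bit set.
    uint32_t UndetectableEqualResult(uint32_t lhs_bits, uint32_t rhs_bits) {
      return (lhs_bits & rhs_bits & kIsUndetectableMask) ^ kIsUndetectableMask;
    }
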
1693 __ Or(a2, a1, a0);
1696 __ sra(a0, a0, 1);
1698 __ subu(v0, a1, a0);
1701 __ Or(a2, a1, a0);
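
The Or(a2, a1, a0) at lines 1693 and 1701 is the standard both-smis probe: because the smi tag is 0 and heap object pointers carry tag bit 1, OR-ing two tagged words leaves the low bit clear only when neither operand is a heap object. Sketch:

    #include <cstdint>

    bool BothSmi(intptr_t a, intptr_t b) { return ((a | b) & 1) == 0; }
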
1728 // or in GP registers (a0, a1, a2, a3) depending on the presence of the FPU.
1735 // left hand side and a0
1838 // Prepare for call to builtin. Push object pointers, a0 (lhs) first,
1854 __ li(a0, Operand(Smi::FromInt(ncr)));
1855 __ push(a0);
1992 __ li(a0, Operand(ExternalReference::isolate_address()));
2040 // Argument is in a0 and v0 at this point, so we can overwrite a0.
2043 __ li(a0, Operand(Smi::FromInt(operand_type_)));
2044 __ Push(v0, a2, a1, a0);
2086 __ JumpIfNotSmi(a0, non_smi);
2089 __ And(t0, a0, ~0x80000000);
2094 __ subu(v0, zero_reg, a0);
2100 __ JumpIfNotSmi(a0, non_smi);
2103 __ Neg(v0, a0);
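
The unary-minus fast path (lines 2086-2103) negates the tagged value directly, which is sound because the tag bit is zero. The And with ~0x80000000 first diverts the two inputs that cannot be negated in place: 0 (negates to -0, which needs a heap number) and the most negative smi (negation overflows). Sketch:

    #include <cstdint>

    bool CanNegateInPlace(uint32_t tagged) {
      return (tagged & 0x7FFFFFFFu) != 0;  // false for 0 and 0x80000000
    }

    // Caller must check CanNegateInPlace first; result stays tagged.
    int32_t NegateSmi(int32_t tagged) { return -tagged; }
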
2148 EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
2149 // a0 is a heap number. Get a new heap number in a1.
2151 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2153 __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2162 __ push(a0);
2165 __ pop(a0);
2169 __ lw(a3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
2170 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
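
The exponent-word load and store at lines 2151-2153 negate a heap number without any FP arithmetic: unary minus only flips the IEEE-754 sign bit, which sits in the high word of the stored double. Sketch:

    #include <cstdint>

    constexpr uint32_t kSignMask = 0x80000000u;

    uint32_t NegatedHighWord(uint32_t exponent_word) {
      return exponent_word ^ kSignMask;  // flip sign; exponent/mantissa intact
    }
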
2185 EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
2186 // Convert the heap number in a0 to an untagged integer in a1.
2187 __ ConvertToInt32(a0, a1, a2, a3, f0, slow);
2287 __ push(a0);
2304 __ Push(a1, a0);
2308 __ li(a0, Operand(Smi::FromInt(operands_type_)));
2309 __ Push(a2, a1, a0);
2379 Register right = a0;
2384 ASSERT(right.is(a0));
2536 Register right = a0;
2556 // Load left and right operands into f12 and f14 or a0/a1 and a2/a3
2715 // Convert the int32 in a2 to the heap number in a0. As
2731 // a3 and a0 as scratch. v0 is preserved and returned.
2732 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
2755 Register right = a0;
2818 Register right = a0;
2843 Register right = a0;
2874 // Jump to type transition if they are not. The registers a0 and a1 (right
2984 // We preserved a0 and a1 to be able to call runtime.
3000 __ Pop(a1, a0);
3026 // registers a0 and a1 (right and left) are preserved for the runtime
3132 // a3 and a0 as scratch. v0 is preserved and returned.
3133 __ mov(a0, t1);
3134 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
3180 __ Branch(&done, ne, a0, Operand(t0));
3182 __ li(a0, Operand(Smi::FromInt(0)));
3184 __ LoadRoot(a0, Heap::kNanValueRootIndex);
3223 Register right = a0;
3301 ASSERT(!result.is(a0) && !result.is(a1));
3305 Register overwritable_operand = mode_ == OVERWRITE_LEFT ? a1 : a0;
3326 __ Push(a1, a0);
3334 // Tagged case: tagged input on top of stack and in a0,
3343 const Register cache_entry = a0;
3350 // Argument is a number and is on stack and in a0.
3352 __ JumpIfNotSmi(a0, &input_not_smi);
3356 __ sra(t0, a0, kSmiTagSize);
3364 __ CheckMap(a0,
3371 __ lw(a2, FieldMemOperand(a0, HeapNumber::kValueOffset));
3372 __ lw(a3, FieldMemOperand(a0, HeapNumber::kValueOffset + 4));
3396 // a0 points to cache array.
3399 // a0 points to the cache for the type type_.
3418 // Find the address of the a1'st entry in the cache, i.e., &a0[a1*12].
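
The &a0[a1*12] addressing at line 3418 implies the transcendental cache's entry layout: three 32-bit words per entry, i.e. the two halves of the input double followed by a pointer to the cached result, 12 bytes each on a 32-bit target. A sketch with illustrative field names:

    #include <cstdint>

    struct CacheEntry {   // 12 bytes with 32-bit pointers
      uint32_t input_lo;  // low word of the input double
      uint32_t input_hi;  // high word of the input double
      void* output;       // cached HeapNumber result
    };

    CacheEntry* EntryAddress(CacheEntry* base, uint32_t index) {
      return base + index;  // scales by sizeof(CacheEntry), 12 here
    }
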
3463 // Register a0 holds precalculated cache entry address; preserve
3488 __ AllocateHeapNumber(a0, scratch0, scratch1, t1, &skip_cache);
3489 __ sdc1(f4, FieldMemOperand(a0, HeapNumber::kValueOffset));
3492 __ push(a0);
3526 __ Move(a0, a1, f4);
3850 // Move result passed in v0 into a0 to call PerformGC.
3851 __ mov(a0, v0);
3859 __ li(a0, Operand(scope_depth));
3860 __ lw(a1, MemOperand(a0));
3862 __ sw(a1, MemOperand(a0));
3866 // a0 = argc
3867 __ mov(a0, s0);
3870 // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
3967 // Last failure (v0) will be moved to (a0) as the parameter when retrying.
3985 // NOTE: s0-s2 hold the arguments of this function instead of a0-a2.
4036 __ li(a0, Operand(false, RelocInfo::NONE));
4038 __ sw(a0, MemOperand(a2));
4061 // a0: entry address
4104 // a0: entry_address
4171 // a0: entry_address
4235 // Uses registers a0 to t0.
4237 // * object: a0 or at sp + 1 * kPointerSize.
4250 const Register object = a0; // Object (lhs).
4395 __ Push(a0, a1);
4401 __ Push(a0, a1);
4404 __ mov(a0, v0);
4406 __ DropAndRet(HasArgsInRegisters() ? 0 : 2, eq, a0, Operand(zero_reg));
4413 Register InstanceofStub::left() { return a0; }
4439 // through register a0. Use unsigned comparison to get negative
4441 __ Branch(&slow, hs, a1, Operand(a0));
4444 __ subu(a3, a0, a1);
4454 __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4455 __ Branch(&slow, Ugreater_equal, a1, Operand(a0));
4458 __ subu(a3, a0, a1);
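
The hs / Ugreater_equal branches at lines 4441 and 4455 are the unsigned-compare bounds check the comment at 4439 alludes to: reinterpreting a possibly negative index as unsigned makes it enormous, so a single comparison rejects negative and too-large indices alike. Sketch:

    #include <cstdint>

    bool InBounds(int32_t index, int32_t length) {
      return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
    }
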
4859 __ li(a0, Operand(address_of_regexp_stack_memory_size));
4860 __ lw(a0, MemOperand(a0, 0));
4861 __ Branch(&runtime, eq, a0, Operand(zero_reg));
4864 __ lw(a0, MemOperand(sp, kJSRegExpOffset));
4866 __ JumpIfSmi(a0, &runtime);
4867 __ GetObjectType(a0, a1, a1);
4871 __ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset));
4878 __ GetObjectType(regexp_data, a0, a0);
4881 a0,
4887 __ lw(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
4888 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
4907 __ GetObjectType(subject, a0, a0);
4908 __ And(a0, a0, Operand(kIsNotStringMask));
4910 __ Branch(&runtime, ne, a0, Operand(zero_reg));
4921 __ lw(a0, MemOperand(sp, kPreviousIndexOffset));
4922 __ JumpIfNotSmi(a0, &runtime);
4923 __ Branch(&runtime, ls, a3, Operand(a0));
4929 __ lw(a0, MemOperand(sp, kLastMatchInfoOffset));
4930 __ JumpIfSmi(a0, &runtime);
4931 __ GetObjectType(a0, a1, a1);
4935 FieldMemOperand(a0, JSArray::kElementsOffset));
4936 __ lw(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
4937 __ Branch(&runtime, ne, a0, Operand(
4941 __ lw(a0,
4944 __ sra(at, a0, kSmiTagSize); // Untag length for comparison.
4953 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
4954 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
4958 a0,
4966 // a0: Instance type of subject string
4996 __ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset));
4998 __ Branch(&runtime, ne, a0, Operand(a1));
5002 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
5003 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
5005 __ And(at, a0, Operand(kStringRepresentationMask));
5011 // a0: Instance type of subject string
5016 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ASCII.
5018 __ sra(a3, a0, 2); // a3 is 1 for ASCII, 0 for UC16 (used below).
5020 __ Movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
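
Lines 5016-5020 pick the compiled regexp entry point without a branch: the string's encoding bit is isolated, and Movz (move-if-zero) swaps in the UC16 code object when the bit is clear. The same selection in portable form, with the mask value left as an assumption:

    #include <cstdint>

    const void* SelectRegExpCode(uint32_t instance_type, uint32_t encoding_mask,
                                 const void* ascii_code, const void* uc16_code) {
      // A non-zero encoding bit means ASCII, per the comment at line 5016.
      return (instance_type & encoding_mask) != 0 ? ascii_code : uc16_code;
    }
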
5043 1, a0, a2);
5065 __ li(a0, Operand(ExternalReference::isolate_address()));
5066 __ sw(a0, MemOperand(sp, 4 * kPointerSize));
5069 __ li(a0, Operand(1));
5070 __ sw(a0, MemOperand(sp, 3 * kPointerSize));
5073 __ li(a0, Operand(address_of_regexp_stack_memory_address));
5074 __ lw(a0, MemOperand(a0, 0));
5077 __ addu(a0, a0, a2);
5078 __ sw(a0, MemOperand(sp, 2 * kPointerSize));
5081 __ li(a0, Operand(
5083 __ sw(a0, MemOperand(sp, 1 * kPointerSize));
5110 // Argument 1 (a0): Subject string.
5111 __ mov(a0, subject);
5146 __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex);
5148 __ Branch(&termination_exception, eq, v0, Operand(a0));
5206 __ Addu(a0,
5217 __ sw(a3, MemOperand(a0, 0));
5219 __ addiu(a0, a0, kPointerSize); // In branch delay slot.
5228 // a0: scratch
5230 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
5231 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
5235 __ And(at, a0, Operand(kIsIndirectStringMask));
5442 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE));
5456 __ li(a0, Operand(argc_)); // Set up the number of arguments.
5466 // a0 : number of arguments
5487 // a0: number of arguments
5510 ASSERT((lhs_.is(a0) && rhs_.is(a1)) ||
5511 (lhs_.is(a1) && rhs_.is(a0)));
5524 stream->Add(lhs_.is(a0) ? "_a0" : "_a1");
5525 stream->Add(rhs_.is(a0) ? "_a0" : "_a1");
5536 ASSERT((lhs_.is(a0) && rhs_.is(a1)) ||
5537 (lhs_.is(a1) && rhs_.is(a0)));
5539 | RegisterField::encode(lhs_.is(a0))
6405 __ lw(a0, MemOperand(sp, 0 * kPointerSize)); // Right.
6408 __ Branch(¬_same, ne, a0, Operand(a1));
6418 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime);
6423 GenerateCompareFlatAsciiStrings(masm, a1, a0, a2, a3, t0, t1);
6441 __ lw(a0, MemOperand(sp, 1 * kPointerSize)); // First argument.
6446 __ JumpIfEitherSmi(a0, a1, &call_runtime);
6448 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
6463 masm, 1 * kPointerSize, a0, a2, a3, t0, t1, &call_builtin);
6474 // a0: first string
6485 __ lw(a2, FieldMemOperand(a0, String::kLengthOffset));
6487 __ mov(v0, a0); // Assume we'll return first string (from a0).
6505 // a0: first string
6522 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
6531 __ lbu(a2, FieldMemOperand(a0, SeqAsciiString::kHeaderSize));
6566 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
6583 __ sw(a0, FieldMemOperand(v0, ConsString::kFirstOffset));
6611 // a0: first string
6621 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
6638 __ addiu(t3, a0, SeqAsciiString::kHeaderSize - kHeapObjectTag);
6644 __ lw(t3, FieldMemOperand(a0, ExternalString::kResourceDataOffset));
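
The stores around line 6583 reveal the concatenation fast path: above a length threshold, StringAddStub allocates a ConsString that merely points at both halves instead of copying characters; flattening is deferred until the characters are actually needed. A schematic of that node (offsets and the length field elided):

    struct ConsString {
      void* first;   // left operand, stored at ConsString::kFirstOffset
      void* second;  // right operand
    };
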
6764 __ Or(a2, a1, a0);
6769 __ Subu(v0, a0, a1);
6773 __ SmiUntag(a0);
6774 __ Subu(v0, a1, a0);
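
Lines 6764-6774 show the compare IC's smi path: equality can subtract the tagged words directly, but ordered comparison untags first, because subtracting 31-bit payloads can never overflow 32 bits, so the sign of the difference is reliable. Sketch:

    #include <cstdint>

    int32_t CompareSmis(int32_t lhs_tagged, int32_t rhs_tagged) {
      int32_t lhs = lhs_tagged >> 1;  // arithmetic shift untags
      int32_t rhs = rhs_tagged >> 1;
      return lhs - rhs;               // <0, ==0, >0; cannot overflow
    }
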
6789 __ And(a2, a1, Operand(a0));
6792 __ GetObjectType(a0, a2, a2);
6805 __ Subu(a2, a0, Operand(kHeapObjectTag));
6831 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, a1, a0);
6838 __ Branch(&miss, ne, a0, Operand(at));
6861 Register right = a0;
6877 // Make sure a0 is non-zero. At this point input operands are
6879 ASSERT(right.is(a0));
6901 Register right = a0;
6942 // Make sure a0 is non-zero. At this point input operands are
6944 ASSERT(right.is(a0));
6946 __ mov(v0, a0); // In the delay slot.
6981 __ And(a2, a1, Operand(a0));
6984 __ GetObjectType(a0, a2, a2);
6991 __ subu(v0, a0, a1);
7000 __ And(a2, a1, a0);
7002 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
7008 __ subu(v0, a0, a1);
7020 __ Push(a1, a0);
7022 __ Push(a1, a0);
7030 __ Pop(a1, a0, ra);
7160 a2.bit() | a1.bit() | a0.bit() | v0.bit());
7163 __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
7233 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
7237 if (name.is(a0)) {
7240 __ Move(a0, elements);
7242 __ Move(a0, elements);
7269 Register dictionary = a0;
7384 { REG(t2), REG(a2), REG(a0), EMIT_REMEMBERED_SET },
7387 { REG(t1), REG(a0), REG(t2), EMIT_REMEMBERED_SET },
7523 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
7525 ASSERT(!address.is(a0));
7527 __ Move(a0, regs_.object());
7631 // -- a0 : element value to store
7646 __ JumpIfSmi(a0, &smi_element);
7653 __ Push(a1, a3, a0);
7665 __ sw(a0, MemOperand(t2, 0));
7667 __ RecordWrite(t1, t2, a0, kRAHasNotBeenSaved, kDontSaveFPRegs,
7670 __ mov(v0, a0);
7678 __ sw(a0, FieldMemOperand(t2, FixedArray::kHeaderSize));
7680 __ mov(v0, a0);
7685 __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, a2,
7688 __ mov(v0, a0);
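
The three endings at lines 7665-7688 differ in barrier discipline: a pointer store into fast elements needs a RecordWrite, a smi store does not, and a double store rewrites unboxed bits in place. The smi exemption follows directly from the tagging scheme; sketch:

    #include <cstdint>

    // Heap object pointers carry tag bit 1; smis carry tag bit 0 and hold
    // no pointer the collector must see, so storing one needs no barrier.
    bool NeedsWriteBarrier(intptr_t stored_value) {
      return (stored_value & 1) != 0;
    }
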