Lines Matching refs:__
56 #define __ ACCESS_MASM(masm_)
84 __ bind(&patch_site_);
85 __ andi(at, reg, 0);
87 __ Branch(target, eq, at, Operand(zero_reg));
95 __ bind(&patch_site_);
96 __ andi(at, reg, 0);
98 __ Branch(target, ne, at, Operand(zero_reg));
105 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
110 __ nop(); // Signals no inlined code.
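// 84-110 read as the MIPS JumpPatchSite idiom: andi(at, reg, 0) is a
// placeholder that the IC machinery later patches into a real smi test for
// the following Branch; the no-op andi on zero_reg (105) records the
// distance back to the patch site in its immediate field, and a bare nop()
// (110) signals that no inlined smi code was emitted.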
156 __ stop("stop-at");
166 __ Branch(&ok, eq, t1, Operand(zero_reg));
168 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
169 __ sw(a2, MemOperand(sp, receiver_offset));
170 __ bind(&ok);
180 __ Push(ra, fp, cp, a1);
184 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
187 __ Addu(fp, sp, Operand(2 * kPointerSize));
191 __ push(at);
202 __ push(a1);
205 __ CallStub(&stub);
207 __ CallRuntime(Runtime::kNewFunctionContext, 1);
212 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
221 __ lw(a0, MemOperand(fp, parameter_offset));
224 __ sw(a0, target);
227 __ RecordWriteContextSlot(
239 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
241 __ mov(a3, a1);
246 __ Addu(a2, fp,
248 __ li(a1, Operand(Smi::FromInt(num_parameters)));
249 __ Push(a3, a2, a1);
264 __ CallStub(&stub);
270 __ CallRuntime(Runtime::kTraceEnter, 0);
297 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
298 __ Branch(&ok, hs, sp, Operand(t0));
300 __ CallStub(&stub);
301 __ bind(&ok);
314 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
322 __ mov(v0, zero_reg);
327 __ li(a2, Operand(profiling_counter_));
328 __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
329 __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
330 __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
344 __ li(a2, Operand(profiling_counter_));
345 __ li(a3, Operand(Smi::FromInt(reset_value)));
346 __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
373 __ slt(at, a3, zero_reg);
374 __ beq(at, zero_reg, &ok);
377 __ CallStub(&stub);
379 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
380 __ sltu(at, sp, t0);
381 __ beq(at, zero_reg, &ok);
384 __ CallStub(&stub);
394 __ bind(&ok);
406 __ Branch(&return_label_);
408 __ bind(&return_label_);
412 __ push(v0);
413 __ CallRuntime(Runtime::kTraceExit, 1);
427 __ Branch(&ok, ge, a3, Operand(zero_reg));
428 __ push(v0);
430 __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
431 __ push(a2);
432 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
435 __ CallStub(&stub);
437 __ pop(v0);
439 __ bind(&ok);
450 // Here we use masm_-> instead of the __
454 __ RecordJSReturn();
485 __ push(result_register());
503 __ LoadRoot(result_register(), index);
509 __ LoadRoot(result_register(), index);
510 __ push(result_register());
522 if (false_label_ != fall_through_) __ Branch(false_label_);
524 if (true_label_ != fall_through_) __ Branch(true_label_);
526 __ LoadRoot(result_register(), index);
538 __ li(result_register(), Operand(lit));
544 __ li(result_register(), Operand(lit));
545 __ push(result_register());
556 if (false_label_ != fall_through_) __ Branch(false_label_);
558 if (true_label_ != fall_through_) __ Branch(true_label_);
561 if (false_label_ != fall_through_) __ Branch(false_label_);
563 if (true_label_ != fall_through_) __ Branch(true_label_);
567 if (false_label_ != fall_through_) __ Branch(false_label_);
569 if (true_label_ != fall_through_) __ Branch(true_label_);
573 __ li(result_register(), Operand(lit));
582 __ Drop(count);
590 __ Drop(count);
591 __ Move(result_register(), reg);
598 if (count > 1) __ Drop(count - 1);
599 __ sw(reg, MemOperand(sp, 0));
607 __ Drop(count);
608 __ Move(result_register(), reg);
617 __ bind(materialize_true);
625 __ bind(materialize_true);
626 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
627 __ Branch(&done);
628 __ bind(materialize_false);
629 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
630 __ bind(&done);
638 __ bind(materialize_true);
639 __ LoadRoot(at, Heap::kTrueValueRootIndex);
640 __ push(at);
641 __ Branch(&done);
642 __ bind(materialize_false);
643 __ LoadRoot(at, Heap::kFalseValueRootIndex);
644 __ push(at);
645 __ bind(&done);
663 __ LoadRoot(result_register(), value_root_index);
670 __ LoadRoot(at, value_root_index);
671 __ push(at);
681 if (true_label_ != fall_through_) __ Branch(true_label_);
683 if (false_label_ != fall_through_) __ Branch(false_label_);
694 __ CallStub(&stub);
695 __ mov(at, zero_reg);
699 __ push(result_register());
700 __ CallRuntime(Runtime::kToBool, 1);
701 __ LoadRoot(at, Heap::kFalseValueRootIndex);
714 __ Branch(if_true, cc, lhs, rhs);
716 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
718 __ Branch(if_true, cc, lhs, rhs);
719 __ Branch(if_false);
742 __ LoadContext(scratch, context_chain_length);
753 __ lw(dest, location);
766 __ sw(src, location);
769 __ RecordWriteContextSlot(scratch0,
789 if (should_normalize) __ Branch(&skip);
792 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
794 __ bind(&skip);
818 __ sw(result_register(), StackOperand(variable));
821 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
822 __ sw(t0, StackOperand(variable));
832 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
833 __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
834 __ Check(ne, "Declaration in with context.",
836 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
837 __ Check(ne, "Declaration in catch context.",
843 __ sw(result_register(), ContextOperand(cp, variable->index()));
846 __ RecordWriteContextSlot(cp,
857 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
858 __ sw(at, ContextOperand(cp, variable->index()));
866 __ li(a2, Operand(variable->name()));
874 __ li(a1, Operand(Smi::FromInt(attr)));
880 __ Push(cp, a2, a1);
884 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
885 __ Push(cp, a2, a1, a0);
888 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
889 __ Push(cp, a2, a1, a0);
891 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
901 __ li(a1, Operand(pairs));
902 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
903 __ Push(cp, a1, a0);
904 __ CallRuntime(Runtime::kDeclareGlobals, 3);
934 __ bind(&next_test);
939 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
942 __ lw(a1, MemOperand(sp, 0)); // Switch value.
947 __ or_(a2, a1, a0);
950 __ Branch(&next_test, ne, a1, Operand(a0));
951 __ Drop(1); // Switch value is no longer needed.
952 __ Branch(clause->body_target());
954 __ bind(&slow_case);
963 __ Branch(&next_test, ne, v0, Operand(zero_reg));
964 __ Drop(1); // Switch value is no longer needed.
965 __ Branch(clause->body_target());
970 __ bind(&next_test);
971 __ Drop(1); // Switch value is no longer needed.
973 __ Branch(nested_statement.break_label());
975 __ Branch(default_clause->body_target());
982 __ bind(clause->body_target());
987 __ bind(nested_statement.break_label());
1004 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1005 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1006 __ Branch(&exit, eq, a0, Operand(at));
1008 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1009 __ Branch(&exit, eq, a0, Operand(null_value));
1011 __ mov(a0, v0);
1014 __ JumpIfSmi(a0, &convert);
1015 __ GetObjectType(a0, a1, a1);
1016 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1017 __ bind(&convert);
1018 __ push(a0);
1019 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1020 __ mov(a0, v0);
1021 __ bind(&done_convert);
1022 __ push(a0);
1027 __ GetObjectType(a0, a1, a1);
1028 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1034 __ CheckEnumCache(null_value, &call_runtime);
1039 __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1040 __ Branch(&use_cache);
1043 __ bind(&call_runtime);
1044 __ push(a0); // Duplicate the enumerable object on the stack.
1045 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1051 __ mov(a2, v0);
1052 __ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));
1053 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1054 __ Branch(&fixed_array, ne, a1, Operand(at));
1057 __ bind(&use_cache);
1058 __ LoadInstanceDescriptors(v0, a1);
1059 __ lw(a1, FieldMemOperand(a1, DescriptorArray::kEnumerationIndexOffset));
1060 __ lw(a2, FieldMemOperand(a1, DescriptorArray::kEnumCacheBridgeCacheOffset));
1063 __ push(v0); // Map.
1064 __ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset));
1065 __ li(a0, Operand(Smi::FromInt(0)));
1067 __ Push(a2, a1, a0);
1068 __ jmp(&loop);
1072 __ bind(&fixed_array);
1079 __ LoadHeapObject(a1, cell);
1080 __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
1081 __ sw(a2, FieldMemOperand(a1, JSGlobalPropertyCell::kValueOffset));
1083 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1084 __ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1086 __ GetObjectType(a2, a3, a3);
1087 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1088 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1089 __ bind(&non_proxy);
1090 __ Push(a1, v0); // Smi and array
1091 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1092 __ li(a0, Operand(Smi::FromInt(0)));
1093 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1097 __ bind(&loop);
1099 __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1100 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1101 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1104 __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1105 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1106 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
1107 __ addu(t0, a2, t0); // Array base + scaled (smi) index.
1108 __ lw(a3, MemOperand(t0)); // Current entry.
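// 1097-1108: both the for-in index (a0) and the cache length (a1) are smis,
// so the index scales to a byte offset with a single shift by
// kPointerSizeLog2 - kSmiTagSize before indexing the FixedArray of keys.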
1112 __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1117 __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1118 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1119 __ Branch(&update_each, eq, t0, Operand(a2));
1124 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1129 __ push(a1); // Enumerable.
1130 __ push(a3); // Current entry.
1131 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1132 __ mov(a3, result_register());
1133 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1137 __ bind(&update_each);
1138 __ mov(result_register(), a3);
1149 __ bind(loop_statement.continue_label());
1150 __ pop(a0);
1151 __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1152 __ push(a0);
1155 __ Branch(&loop);
1158 __ bind(loop_statement.break_label());
1159 __ Drop(5);
1163 __ bind(&exit);
1182 __ li(a0, Operand(info));
1183 __ push(a0);
1184 __ CallStub(&stub);
1186 __ li(a0, Operand(info));
1187 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1189 __ Push(cp, a0, a1);
1190 __ CallRuntime(Runtime::kNewClosure, 3);
1214 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1215 __ Branch(slow, ne, temp, Operand(zero_reg));
1218 __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1231 __ Move(next, current);
1233 __ bind(&loop);
1235 __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1236 __ LoadRoot(t0, Heap::kGlobalContextMapRootIndex);
1237 __ Branch(&fast, eq, temp, Operand(t0));
1239 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1240 __ Branch(slow, ne, temp, Operand(zero_reg));
1242 __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1243 __ Branch(&loop);
1244 __ bind(&fast);
1247 __ lw(a0, GlobalObjectOperand());
1248 __ li(a2, Operand(var->name()));
1268 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1269 __ Branch(slow, ne, temp, Operand(zero_reg));
1271 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1277 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1278 __ Branch(slow, ne, temp, Operand(zero_reg));
1298 __ Branch(done);
1301 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1305 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1306 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1308 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1309 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1311 __ Branch(done, ne, at, Operand(zero_reg));
1312 __ li(a0, Operand(var->name()));
1313 __ push(a0);
1314 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1317 __ Branch(done);
1334 __ lw(a0, GlobalObjectOperand());
1335 __ li(a2, Operand(var->name()));
1386 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1387 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1392 __ Branch(&done, ne, at, Operand(zero_reg));
1393 __ li(a0, Operand(var->name()));
1394 __ push(a0);
1395 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1396 __ bind(&done);
1400 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1401 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1416 __ bind(&slow);
1418 __ li(a1, Operand(var->name()));
1419 __ Push(cp, a1); // Context and name.
1420 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1421 __ bind(&done);
1438 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1439 __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1442 __ lw(t1, FieldMemOperand(t0, literal_offset));
1443 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1444 __ Branch(&materialized, ne, t1, Operand(at));
1448 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1449 __ li(a2, Operand(expr->pattern()));
1450 __ li(a1, Operand(expr->flags()));
1451 __ Push(t0, a3, a2, a1);
1452 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1453 __ mov(t1, v0);
1455 __ bind(&materialized);
1458 __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1459 __ jmp(&allocated);
1461 __ bind(&runtime_allocate);
1462 __ push(t1);
1463 __ li(a0, Operand(Smi::FromInt(size)));
1464 __ push(a0);
1465 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1466 __ pop(t1);
1468 __ bind(&allocated);
1474 __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
1481 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1482 __ push(a1);
1492 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1493 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1494 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1495 __ li(a1, Operand(constant_properties));
1502 __ li(a0, Operand(Smi::FromInt(flags)));
1503 __ Push(a3, a2, a1, a0);
1506 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1509 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1512 __ CallStub(&stub);
1532 __ push(v0); // Save result on stack.
1545 __ mov(a0, result_register());
1546 __ li(a2, Operand(key->handle()));
1547 __ lw(a1, MemOperand(sp));
1561 __ lw(a0, MemOperand(sp));
1562 __ push(a0);
1566 __ li(a0, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
1567 __ push(a0);
1568 __ CallRuntime(Runtime::kSetProperty, 4);
1570 __ Drop(3);
1587 __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1588 __ push(a0);
1592 __ li(a0, Operand(Smi::FromInt(NONE)));
1593 __ push(a0);
1594 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1599 __ lw(a0, MemOperand(sp));
1600 __ push(a0);
1601 __ CallRuntime(Runtime::kToFastProperties, 1);
1626 __ mov(a0, result_register());
1627 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1628 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1629 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1630 __ li(a1, Operand(constant_elements));
1631 __ Push(a3, a2, a1);
1636 __ CallStub(&stub);
1637 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1640 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1642 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1651 __ CallStub(&stub);
1668 __ push(v0);
1676 __ lw(t2, MemOperand(sp)); // Copy of array literal.
1677 __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1678 __ sw(result_register(), FieldMemOperand(a1, offset));
1680 __ RecordWriteField(a1, offset, result_register(), a2,
1684 __ lw(a1, MemOperand(sp)); // Copy of array literal.
1685 __ lw(a2, FieldMemOperand(a1, JSObject::kMapOffset));
1686 __ li(a3, Operand(Smi::FromInt(i)));
1687 __ li(t0, Operand(Smi::FromInt(expr->literal_index())));
1688 __ mov(a0, result_register());
1690 __ CallStub(&stub);
1732 __ push(result_register());
1742 __ lw(a1, MemOperand(sp, 0));
1743 __ push(v0);
1772 __ push(v0); // Left operand goes on the stack.
1820 __ mov(a0, result_register());
1821 __ li(a2, Operand(key->handle()));
1830 __ mov(a0, result_register());
1850 __ pop(left);
1851 __ mov(a0, result_register());
1854 __ Or(scratch1, left, Operand(right));
1859 __ bind(&stub_call);
1863 __ jmp(&done);
1865 __ bind(&smi_case);
1871 __ Branch(&stub_call);
1872 __ GetLeastBitsFromSmi(scratch1, right, 5);
1873 __ srav(right, left, scratch1);
1874 __ And(v0, right, Operand(~kSmiTagMask));
1877 __ Branch(&stub_call);
1878 __ SmiUntag(scratch1, left);
1879 __ GetLeastBitsFromSmi(scratch2, right, 5);
1880 __ sllv(scratch1, scratch1, scratch2);
1881 __ Addu(scratch2, scratch1, Operand(0x40000000));
1882 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1883 __ SmiTag(v0, scratch1);
1887 __ Branch(&stub_call);
1888 __ SmiUntag(scratch1, left);
1889 __ GetLeastBitsFromSmi(scratch2, right, 5);
1890 __ srlv(scratch1, scratch1, scratch2);
1891 __ And(scratch2, scratch1, 0xc0000000);
1892 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
1893 __ SmiTag(v0, scratch1);
1897 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
1898 __ BranchOnOverflow(&stub_call, scratch1);
1901 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
1902 __ BranchOnOverflow(&stub_call, scratch1);
1905 __ SmiUntag(scratch1, right);
1906 __ Mult(left, scratch1);
1907 __ mflo(scratch1);
1908 __ mfhi(scratch2);
1909 __ sra(scratch1, scratch1, 31);
1910 __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
1911 __ mflo(v0);
1912 __ Branch(&done, ne, v0, Operand(zero_reg));
1913 __ Addu(scratch2, right, left);
1914 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1916 __ mov(v0, zero_reg);
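// Smi multiply, 1905-1916: the right operand is untagged first, so lo/hi of
// the 64-bit product already hold the tagged result; if mfhi differs from
// the sign-extension of mflo (sra by 31), the product overflowed 32 bits.
// A zero result still checks the sign of left + right, because a negative
// operand would make the true result -0, which a smi cannot represent.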
1920 __ Or(v0, left, Operand(right));
1923 __ And(v0, left, Operand(right));
1926 __ Xor(v0, left, Operand(right));
1932 __ bind(&done);
1940 __ mov(a0, result_register());
1941 __ pop(a1);
1977 __ push(result_register()); // Preserve value.
1979 __ mov(a1, result_register());
1980 __ pop(a0); // Restore value.
1981 __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
1989 __ push(result_register()); // Preserve value.
1992 __ mov(a1, result_register());
1993 __ pop(a2);
1994 __ pop(a0); // Restore value.
2010 __ mov(a0, result_register());
2011 __ li(a2, Operand(var->name()));
2012 __ lw(a1, GlobalObjectOperand());
2023 __ lw(a1, StackOperand(var));
2024 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2025 __ Branch(&skip, ne, a1, Operand(t0));
2026 __ sw(result_register(), StackOperand(var));
2027 __ bind(&skip);
2035 __ push(v0);
2036 __ li(a0, Operand(var->name()));
2037 __ Push(cp, a0); // Context and name.
2038 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2044 __ push(v0); // Value.
2045 __ li(a1, Operand(var->name()));
2046 __ li(a0, Operand(Smi::FromInt(language_mode())));
2047 __ Push(cp, a1, a0); // Context, name, strict mode.
2048 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2053 __ lw(a3, location);
2054 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2055 __ Branch(&assign, ne, a3, Operand(t0));
2056 __ li(a3, Operand(var->name()));
2057 __ push(a3);
2058 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2060 __ bind(&assign);
2061 __ sw(result_register(), location);
2064 __ mov(a3, result_register());
2066 __ RecordWriteContextSlot(
2078 __ lw(a2, location);
2079 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2080 __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
2083 __ sw(v0, location);
2085 __ mov(a3, v0);
2087 __ RecordWriteContextSlot(
2092 __ push(v0); // Value.
2093 __ li(a1, Operand(var->name()));
2094 __ li(a0, Operand(Smi::FromInt(language_mode())));
2095 __ Push(cp, a1, a0); // Context, name, strict mode.
2096 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2113 __ push(result_register());
2114 __ lw(t0, MemOperand(sp, kPointerSize)); // Receiver is now under value.
2115 __ push(t0);
2116 __ CallRuntime(Runtime::kToSlowProperties, 1);
2117 __ pop(result_register());
2122 __ mov(a0, result_register()); // Load the value.
2123 __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
2127 __ lw(a1, MemOperand(sp));
2129 __ pop(a1);
2139 __ push(v0); // Result of assignment, saved even if not needed.
2141 __ lw(t0, MemOperand(sp, kPointerSize));
2142 __ push(t0);
2143 __ CallRuntime(Runtime::kToFastProperties, 1);
2144 __ pop(v0);
2145 __ Drop(1);
2159 __ push(result_register());
2161 __ lw(t0, MemOperand(sp, 2 * kPointerSize));
2162 __ push(t0);
2163 __ CallRuntime(Runtime::kToSlowProperties, 1);
2164 __ pop(result_register());
2174 __ mov(a0, result_register());
2175 __ pop(a1); // Key.
2179 __ lw(a2, MemOperand(sp));
2181 __ pop(a2);
2191 __ push(v0); // Result of assignment, saved even if not needed.
2193 __ lw(t0, MemOperand(sp, kPointerSize));
2194 __ push(t0);
2195 __ CallRuntime(Runtime::kToFastProperties, 1);
2196 __ pop(v0);
2197 __ Drop(1);
2215 __ pop(a1);
2226 __ Call(code, rmode, ast_id);
2240 __ li(a2, Operand(name));
2250 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2262 __ pop(a1);
2263 __ push(v0);
2264 __ push(a1);
2279 __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
2283 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2300 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2301 __ CallStub(&stub);
2304 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2312 __
2314 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2316 __ push(a1);
2320 __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
2321 __ push(a1);
2323 __ li(a1, Operand(Smi::FromInt(language_mode())));
2324 __ push(a1);
2327 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2328 __ push(a1);
2331 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2357 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2358 __ push(a2); // Reserved receiver slot.
2367 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2368 __ push(a1);
2373 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2374 __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2379 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2380 __ CallStub(&stub);
2383 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2387 __ lw(a0, GlobalObjectOperand());
2388 __ push(a0);
2400 __ bind(&slow);
2403 __ push(context_register());
2404 __ li(a2, Operand(proxy->name()));
2405 __ push(a2);
2406 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2407 __ Push(v0, v1); // Function, receiver.
2414 __ Branch(&call);
2415 __ bind(&done);
2417 __ push(v0);
2420 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
2421 __ push(a1);
2422 __ bind(&call);
2446 __ lw(a1, GlobalObjectOperand());
2447 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2448 __ push(a1);
2483 __ li(a0, Operand(arg_count));
2484 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2495 __ li(a2, Operand(cell));
2501 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2521 __ And(t0, v0, Operand(kSmiTagMask));
2542 __ And(at, v0, Operand(kSmiTagMask | 0x80000000));
2562 __ JumpIfSmi(v0, if_false);
2563 __ LoadRoot(at, Heap::kNullValueRootIndex);
2564 __ Branch(if_true, eq, v0, Operand(at));
2565 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
2567 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
2568 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2569 __ Branch(if_false, ne, at, Operand(zero_reg));
2570 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
2571 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2593 __ JumpIfSmi(v0, if_false);
2594 __ GetObjectType(v0, a1, a1);
2616 __ JumpIfSmi(v0, if_false);
2617 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2618 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
2619 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2641 if (FLAG_debug_code) __ AbortIfSmi(v0);
2643 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2644 __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
2645 __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
2646 __ Branch(if_true, ne, t0, Operand(zero_reg));
2649 __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2650 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
2651 __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
2652 __ Branch(if_false, eq, a2, Operand(t0));
2657 __ LoadInstanceDescriptors(a1, t0);
2658 __ lw(a3, FieldMemOperand(t0, FixedArray::kLengthOffset));
2665 __ Addu(a2, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2666 __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
2667 __ Addu(a2, a2, t1);
2670 __ Addu(t0,
2679 __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
2680 __ jmp(&entry);
2681 __ bind(&loop);
2682 __ lw(a3, MemOperand(t0, 0));
2683 __ Branch(if_false, eq, a3, Operand(t2));
2684 __ Addu(t0, t0, Operand(kPointerSize));
2685 __ bind(&entry);
2686 __ Branch(&loop, ne, t0, Operand(a2));
2690 __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
2691 __ JumpIfSmi(a2, if_false);
2692 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
2693 __ lw(a3, ContextOperand(cp, Context::GLOBAL_INDEX));
2694 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset));
2695 __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2696 __ Branch(if_false, ne, a2, Operand(a3));
2700 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
2701 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2702 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
2703 __ jmp(if_true);
2723 __ JumpIfSmi(v0, if_false);
2724 __ GetObjectType(v0, a1, a2);
2726 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
2727 __ Branch(if_false);
2746 __ JumpIfSmi(v0, if_false);
2747 __ GetObjectType(v0, a1, a1);
2769 __ JumpIfSmi(v0, if_false);
2770 __ GetObjectType(v0, a1, a1);
2789 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2793 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
2794 __ Branch(&check_frame_marker, ne,
2796 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
2799 __ bind(&check_frame_marker);
2800 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
2824 __ pop(a1);
2839 __ mov(a1, v0);
2840 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2842 __ CallStub(&stub);
2851 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2854 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2855 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
2856 __ Branch(&exit, ne, a3,
2861 __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
2863 __ bind(&exit);
2876 __ JumpIfSmi(v0, &null);
2883 __ GetObjectType(v0, v0, a1); // Map is now in v0.
2884 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
2888 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
2892 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
2897 __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
2898 __ GetObjectType(v0, a1, a1);
2899 __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
2903 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
2904 __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
2905 __ Branch(&done);
2908 __ bind(&function);
2909 __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);
2910 __ jmp(&done);
2913 __ bind(&non_function_constructor);
2914 __ LoadRoot(v0, Heap::kObject_symbolRootIndex);
2915 __ jmp(&done);
2918 __ bind(&null);
2919 __ LoadRoot(v0, Heap::kNullValueRootIndex);
2922 __ bind(&done);
2941 __ CallRuntime(Runtime::kLog, 2);
2945 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2957 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
2958 __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
2959 __ jmp(&heapnumber_allocated);
2961 __ bind(&slow_allocate_heapnumber);
2964 __ CallRuntime(Runtime::kNumberAlloc, 0);
2965 __ mov(s0, v0); // Save result in s0, so it is saved thru CFunc call.
2967 __ bind(&heapnumber_allocated);
2973 __ PrepareCallCFunction(1, a0);
2974 __ lw(a0, ContextOperand(cp, Context::GLOBAL_INDEX));
2975 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
2976 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2980 __ li(a1, Operand(0x41300000));
2982 __ Move(f12, v0, a1);
2984 __ Move(f14, zero_reg, a1);
2986 __ sub_d(f0, f12, f14);
2987 __ sdc1(f0, MemOperand(s0, HeapNumber::kValueOffset - kHeapObjectTag));
2988 __ mov(v0, s0);
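// 2980-2988 (FPU path): the 32 random bits become the low mantissa word of
// a double whose high word is 0x41300000 (the bit pattern of 2^20);
// subtracting 2^20 (f14) leaves a uniform value in [0, 1), which is stored
// into the heap number allocated above.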
2990 __ PrepareCallCFunction(2, a0);
2991 __ mov(a0, s0);
2992 __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
2993 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalContextOffset));
2994 __ CallCFunction(
3010 __ CallStub(&stub);
3024 __ CallStub(&stub);
3037 __ JumpIfSmi(v0, &done);
3039 __ GetObjectType(v0, a1, a1);
3040 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3042 __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3044 __ bind(&done);
3064 __ AbortIfSmi(object);
3065 __ GetObjectType(object, scratch1, scratch1);
3066 __ Assert(eq, "Trying to get date field from non-date.",
3071 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3075 __ li(scratch1, Operand(stamp));
3076 __ lw(scratch1, MemOperand(scratch1));
3077 __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3078 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3079 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3081 __ jmp(&done);
3083 __ bind(&runtime);
3084 __ PrepareCallCFunction(2, scratch1);
3085 __ li(a1, Operand(index));
3086 __ Move(a0, object);
3087 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3088 __ bind(&done);
3103 __ CallStub(&stub);
3105 __ CallRuntime(Runtime::kMath_pow, 2);
3117 __ pop(a1); // v0 = value. a1 = object.
3121 __ JumpIfSmi(a1, &done);
3124 __ GetObjectType(a1, a2, a2);
3125 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3128 __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3131 __ mov(a2, v0);
3132 __ RecordWriteField(
3135 __ bind(&done);
3148 __ CallStub(&stub);
3162 __ jmp(&done);
3167 __ bind(&done);
3178 __ mov(a0, result_register());
3184 __ pop(object);
3197 __ jmp(&done);
3199 __ bind(&index_out_of_range);
3202 __ LoadRoot(result, Heap::kNanValueRootIndex);
3203 __ jmp(&done);
3205 __ bind(&need_conversion);
3208 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3209 __ jmp(&done);
3214 __ bind(&done);
3225 __ mov(a0, result_register());
3232 __ pop(object);
3246 __ jmp(&done);
3248 __ bind(&index_out_of_range);
3251 __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3252 __ jmp(&done);
3254 __ bind(&need_conversion);
3257 __ li(result, Operand(Smi::FromInt(0)));
3258 __ jmp(&done);
3263 __ bind(&done);
3275 __ CallStub(&stub);
3288 __ CallStub(&stub);
3300 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3301 __ CallStub(&stub);
3313 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3314 __ CallStub(&stub);
3326 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3327 __ CallStub(&stub);
3339 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
3340 __ CallStub(&stub);
3350 __ CallRuntime(Runtime::kMath_sqrt, 1);
3367 __ GetObjectType(v0, a1, a1);
3368 __ Branch(&proxy, eq, a1, Operand(JS_FUNCTION_PROXY_TYPE));
3371 __ mov(a1, result_register());
3373 __ InvokeFunction(a1, count, CALL_FUNCTION,
3375 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3376 __ jmp(&done);
3378 __ bind(&proxy);
3379 __ push(v0);
3380 __ CallRuntime(Runtime::kCall, args->length());
3381 __ bind(&done);
3394 __ CallStub(&stub);
3409 __ Abort("Attempt to use undefined cache.");
3410 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3419 __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
3420 __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
3421 __ lw(cache,
3424 __ lw(cache,
3430 __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3432 __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3434 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3435 __ addu(a3, a3, at);
3437 __ lw(a2, MemOperand(a3));
3438 __ Branch(&not_found, ne, key, Operand(a2));
3440 __ lw(v0, MemOperand(a3, kPointerSize));
3441 __ Branch(&done);
3443 __ bind(&not_found);
3445 __ Push(cache, key);
3446 __ CallRuntime(Runtime::kGetFromCache, 2);
3448 __ bind(&done);
3464 __ pop(left);
3467 __ Branch(&ok, eq, left, Operand(right));
3469 __ And(tmp, left, Operand(right));
3470 __ JumpIfSmi(tmp, &fail);
3471 __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3472 __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3473 __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
3474 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3475 __ Branch(&fail, ne, tmp, Operand(tmp2));
3476 __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3477 __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3478 __ Branch(&ok, eq, tmp, Operand(tmp2));
3479 __ bind(&fail);
3480 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3481 __ jmp(&done);
3482 __ bind(&ok);
3483 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3484 __ bind(&done);
3501 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3502 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3517 __ AbortIfNotString(v0);
3520 __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3521 __ IndexFromHash(v0, v0);
3553 __ pop(separator);
3556 __ JumpIfSmi(array, &bailout);
3557 __ GetObjectType(array, scratch1, scratch2);
3558 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3561 __ CheckFastElements(scratch1, scratch2, &bailout);
3564 __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3565 __ SmiUntag(array_length);
3566 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3567 __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
3568 __ Branch(&done);
3570 __ bind(&non_trivial_array);
3574 __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3579 __ mov(string_length, zero_reg);
3580 __ Addu(element,
3582 __ sll(elements_end, array_length, kPointerSizeLog2);
3583 __ Addu(elements_end, element, elements_end);
3593 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
3596 __ bind(&loop);
3597 __ lw(string, MemOperand(element));
3598 __ Addu(element, element, kPointerSize);
3599 __ JumpIfSmi(string, &bailout);
3600 __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3601 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3602 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3603 __ lw(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3604 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
3605 __ BranchOnOverflow(&bailout, scratch3);
3606 __ Branch(&loop, lt, element, Operand(elements_end));
3609 __ Branch(&not_size_one_array, ne, array_length, Operand(1));
3610 __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3611 __ Branch(&done);
3613 __ bind(&not_size_one_array);
3622 __ JumpIfSmi(separator, &bailout);
3623 __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3624 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3625 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3630 __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3631 __ Subu(string_length, string_length, Operand(scratch1));
3632 __ Mult(array_length, scratch1);
3635 __ mfhi(scratch2);
3636 __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
3637 __ mflo(scratch2);
3638 __ And(scratch3, scratch2, Operand(0x80000000));
3639 __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
3640 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
3641 __ BranchOnOverflow(&bailout, scratch3);
3642 __ SmiUntag(string_length);
3646 __ Addu(element,
3655 __ AllocateAsciiString(result,
3664 __ sll(elements_end, array_length, kPointerSizeLog2);
3665 __ Addu(elements_end, element, elements_end);
3668 __ Addu(result_pos,
3673 __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3674 __ li(at, Operand(Smi::FromInt(1)));
3675 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
3676 __ Branch(&long_separator, gt, scratch1, Operand(at));
3679 __ bind(&empty_separator_loop);
3686 __ lw(string, MemOperand(element));
3687 __ Addu(element, element, kPointerSize);
3688 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3689 __ SmiUntag(string_length);
3690 __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3691 __ CopyBytes(string, result_pos, string_length, scratch1);
3693 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
3695 __ Branch(&done);
3698 __ bind(&one_char_separator);
3700 __ lbu(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3703 __ jmp(&one_char_separator_loop_entry);
3705 __ bind(&one_char_separator_loop);
3713 __ sb(separator, MemOperand(result_pos));
3714 __ Addu(result_pos, result_pos, 1);
3717 __ bind(&one_char_separator_loop_entry);
3718 __ lw(string, MemOperand(element));
3719 __ Addu(element, element, kPointerSize);
3720 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3721 __ SmiUntag(string_length);
3722 __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3723 __ CopyBytes(string, result_pos, string_length, scratch1);
3725 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
3727 __ Branch(&done);
3731 __ bind(&long_separator_loop);
3739 __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
3740 __ SmiUntag(string_length);
3741 __ Addu(string,
3744 __ CopyBytes(string, result_pos, string_length, scratch1);
3746 __ bind(&long_separator);
3747 __ lw(string, MemOperand(element));
3748 __ Addu(element, element, kPointerSize);
3749 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3750 __ SmiUntag(string_length);
3751 __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3752 __ CopyBytes(string, result_pos, string_length, scratch1);
3754 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
3756 __ Branch(&done);
3758 __ bind(&bailout);
3759 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3760 __ bind(&done);
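// 3553-3760 is EmitFastAsciiArrayJoin (named in the Assert at 3593): after
// validating the array and summing the string lengths (with
// AdduAndCheckForOverflow and a checked Mult for the separator total), the
// join runs one of three copy loops, for an empty, a one-character, or a
// longer separator, all meeting at the shared &bailout/&done labels.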
3778 __ lw(a0, GlobalObjectOperand());
3779 __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
3780 __ push(a0);
3791 __ li(a2, Operand(expr->name()));
3797 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3800 __ CallRuntime(expr->function(), arg_count);
3818 __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
3819 __ push(a1);
3820 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3828 __ lw(a2, GlobalObjectOperand());
3829 __ li(a1, Operand(var->name()));
3830 __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
3831 __ Push(a2, a1, a0);
3832 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3841 __ push(context_register());
3842 __ li(a2, Operand(var->name()));
3843 __ push(a2);
3844 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3888 __ bind(&materialize_true);
3890 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3891 if (context()->IsStackValue()) __ push(v0);
3892 __ jmp(&done);
3893 __ bind(&materialize_false);
3895 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3896 if (context()->IsStackValue()) __ push(v0);
3897 __ bind(&done);
3907 __ CallRuntime(Runtime::kTypeof, 1);
3916 __ JumpIfSmi(result_register(), &no_conversion);
3917 __ mov(a0, result_register());
3919 __ CallStub(&convert_stub);
3920 __ bind(&no_conversion);
3950 __ mov(a0, result_register());
3987 __ li(at, Operand(Smi::FromInt(0)));
3988 __ push(at);
3993 __ push(v0);
3998 __ lw(a1, MemOperand(sp, 0));
3999 __ push(v0);
4014 __ JumpIfSmi(v0, &no_conversion);
4015 __ mov(a0, v0);
4017 __ CallStub(&convert_stub);
4018 __ bind(&no_conversion);
4028 __ push(v0);
4031 __ sw(v0, MemOperand(sp, kPointerSize));
4034 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4039 __ mov(a0, result_register());
4046 __ li(a1, Operand(Smi::FromInt(count_value)));
4049 __ AdduAndCheckForOverflow(v0, a0, a1, t0);
4050 __ BranchOnOverflow(&stub_call, t0); // Do stub on overflow.
4055 __ bind(&stub_call);
4064 __ bind(&done);
4089 __ mov(a0, result_register()); // Value.
4090 __ li(a2, Operand(prop->key()->AsLiteral()->handle())); // Name.
4091 __ pop(a1); // Receiver.
4107 __ mov(a0, result_register()); // Value.
4108 __ pop(a1); // Key.
4109 __ pop(a2); // Receiver.
4134 __ lw(a0, GlobalObjectOperand());
4135 __ li(a2, Operand(proxy->name()));
4149 __ bind(&slow);
4150 __ li(a0, Operand(proxy->name()));
4151 __ Push(cp, a0);
4152 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4154 __ bind(&done);
4179 __ JumpIfSmi(v0, if_true);
4180 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4181 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4184 __ JumpIfSmi(v0, if_false);
4186 __ GetObjectType(v0, v0, a1);
4187 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4188 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4189 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4193 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4194 __ Branch(if_true, eq, v0, Operand(at));
4195 __ LoadRoot(at, Heap::kFalseValueRootIndex);
4199 __ LoadRoot(at, Heap::kNullValueRootIndex);
4202 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4203 __ Branch(if_true, eq, v0, Operand(at));
4204 __ JumpIfSmi(v0, if_false);
4206 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4207 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4208 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4211 __ JumpIfSmi(v0, if_false);
4213 __ GetObjectType(v0, v0, a1);
4214 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4218 __ JumpIfSmi(v0, if_false);
4220 __ LoadRoot(at, Heap::kNullValueRootIndex);
4221 __ Branch(if_true, eq, v0, Operand(at));
4224 __ GetObjectType(v0, v0, a1);
4225 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
4226 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
4227 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
4229 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4230 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4233 if (if_false != fall_through) __ jmp(if_false);
4261 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4263 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
4270 __ CallStub(&stub);
4302 __ mov(a0, result_register());
4303 __ pop(a1);
4309 __ Or(a2, a0, Operand(a1));
4312 __ bind(&slow_case);
4345 __ mov(a0, result_register());
4346 __ LoadRoot(a1, nil_value);
4353 __ Branch(if_true, eq, a0, Operand(a1));
4354 __ LoadRoot(a1, other_nil_value);
4355 __ Branch(if_true, eq, a0, Operand(a1));
4356 __ JumpIfSmi(a0, if_false);
4358 __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
4359 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
4360 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4368 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4385 __ sw(value, MemOperand(fp, frame_offset));
4390 __ lw(dst, ContextOperand(cp, context_index));
4401 __ li(at, Operand(Smi::FromInt(0)));
4406 __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4409 __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4411 __ push(at);
4421 __ push(result_register());
4423 __ Subu(a1, ra, Operand(masm_->CodeObject()));
4426 __ Addu(a1, a1, Operand(a1)); // Convert to smi.
4427 __ push(a1);
4434 __ pop(a1);
4436 __ pop(result_register());
4438 __ sra(a1, a1, 1); // Un-smi-tag value.
4439 __ Addu(at, a1, Operand(masm_->CodeObject()));
4440 __ Jump(at);
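// 4421-4440: ra is made GC-safe before being spilled by converting it to an
// offset from masm_->CodeObject() and smi-tagging it (Addu(a1, a1, a1)
// doubles, i.e. tags); on the way out the value is untagged with sra and
// rebased onto the code object before the indirect Jump.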
4444 #undef __
4446 #define __ ACCESS_MASM(masm())
4457 __ Drop(*stack_depth); // Down to the handler block.
4460 __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4461 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4463 __ PopTryHandler();
4464 __ Call(finally_entry_);
4472 #undef __