// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


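// An object is in new space exactly when the page containing it has the
// IN_FROM_SPACE or IN_TO_SPACE flag set.  The page header is located by
// masking off the low, page-offset bits of the object's address.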
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}


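// The remembered set records, in the store buffer, the addresses of slots
// that may point into new space.  The store buffer is a simple bump array:
// slot addresses are appended at store_buffer_top until the overflow bit
// becomes set in the new top pointer, at which point the overflow stub runs.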
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (FLAG_debug_code) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Check for end of buffer; call the overflow stub if it was reached.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}


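// Clamp a double to the unsigned byte range [0, 255], mapping NaN and
// negative inputs to 0.  Adding 0.5 before the truncating cvttsd2si
// implements round-half-up; any result with bits outside the low byte
// (including the 0x80000000 that cvttsd2si produces on overflow) saturates
// to 255.  Roughly, as a C++ sketch (ignoring SSE rounding-mode details):
//
//   uint8_t ClampDoubleToUint8(double d) {
//     if (!(d >= 0.0)) return 0;                  // NaN or negative.
//     int32_t n = static_cast<int32_t>(d + 0.5);  // Truncate after +0.5.
//     return (n & ~0xFF) ? 255 : static_cast<uint8_t>(n);
//   }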
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  ExternalReference zero_ref = ExternalReference::address_of_zero();
  movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  ExternalReference half_ref = ExternalReference::address_of_one_half();
  movdbl(scratch_reg, Operand::StaticVariable(half_ref));
  addsd(scratch_reg, input_reg);
  cvttsd2si(result_reg, Operand(scratch_reg));
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}


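// Clamp a 32-bit integer to [0, 255] without a branch on the value's sign:
// if any bits outside the low byte are set, the value is either negative
// (clamp to 0) or greater than 255 (clamp to 255).  setcc(negative) yields
// 1 for negative values and 0 otherwise; decrementing that byte then gives
// 0x00 or 0xFF respectively.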
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}


void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Zap the clobbered input registers when emitting debug code, to provoke
  // errors if they are used afterwards.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Zap the clobbered input registers when emitting debug code, to provoke
  // errors if they are used afterwards.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  if (emit_debug_code()) {
    AbortIfSmi(object);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (FLAG_debug_code) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Zap the clobbered registers when emitting debug code, to provoke errors
  // if they are used afterwards.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (x.rmode_ != RelocInfo::NONE) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


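// SafeSet and SafePush avoid placing a large, possibly attacker-controlled
// immediate verbatim in the instruction stream: the constant is emitted
// XOR'ed with the per-isolate JIT cookie and un-XOR'ed at run time, which
// hardens the generated code against JIT-spraying attacks.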
void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  // see ROOT_ACCESSOR macro in factory.h
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  // see ROOT_ACCESSOR macro in factory.h
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  STATIC_ASSERT(FAST_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  STATIC_ASSERT(FAST_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastSmiOnlyElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiOnlyElements(Register map,
                                              Label* fail,
                                              Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastSmiOnlyElementValue);
  j(above, fail, distance);
}


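// Store a number (smi or heap number) into a FixedDoubleArray slot,
// canonicalizing NaNs on the way: when the upper 32 bits of the double
// compare at or above kNaNOrInfinityLowerBoundUpper32 the exponent field is
// all ones, so the value may be NaN or an Infinity; a non-zero fraction then
// identifies NaN, which is replaced by the canonical non-hole NaN pattern.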
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope use_sse2(SSE2);
    movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
           scratch2);
  } else {
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope use_sse2(SSE2);
    movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope fscope(SSE2);
    cvtsi2sd(scratch2, scratch1);
    movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
           scratch2);
  } else {
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
  }
  bind(&done);
}


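// Compare the map of |obj| against |map|.  With ALLOW_ELEMENT_TRANSITION_MAPS
// the comparison also accepts maps that |map| could transition to via an
// elements-kind transition (to FAST_ELEMENTS or FAST_DOUBLE_ELEMENTS),
// jumping to |early_success| as soon as one candidate matches.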
void MacroAssembler::CompareMap(Register obj,
                                Handle<Map> map,
                                Label* early_success,
                                CompareMapMode mode) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
    Map* transitioned_fast_element_map(
        map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
    ASSERT(transitioned_fast_element_map == NULL ||
           map->elements_kind() != FAST_ELEMENTS);
    if (transitioned_fast_element_map != NULL) {
      j(equal, early_success, Label::kNear);
      cmp(FieldOperand(obj, HeapObject::kMapOffset),
          Handle<Map>(transitioned_fast_element_map));
    }

    Map* transitioned_double_map(
        map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
    ASSERT(transitioned_double_map == NULL ||
           map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
    if (transitioned_double_map != NULL) {
      j(equal, early_success, Label::kNear);
      cmp(FieldOperand(obj, HeapObject::kMapOffset),
          Handle<Map>(transitioned_double_map));
    }
  }
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type,
                              CompareMapMode mode) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, map, &success, mode);
  j(not_equal, fail);
  bind(&success);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


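// The two-sided range check is folded into a single unsigned comparison:
// subtracting FIRST_NONCALLABLE_SPEC_OBJECT_TYPE biases the instance type so
// that any value outside [FIRST, LAST] wraps around and compares above the
// width of the range.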
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}


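// Compare the two values on top of the x87 stack and pop them.  On CPUs
// with CMOV (P6 and later) fucomip sets EFLAGS directly; otherwise the FPU
// status word has to be routed through ax with fnstsw/sahf, saving and
// restoring eax around the sequence.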
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  JumpIfSmi(object, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  ExternalReference context_address(Isolate::kContextAddress,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue();
}


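// A stack handler occupies five words: the next-handler link at offset 0,
// then the code object, the state word (kind and handler index), the saved
// context, and the saved frame pointer, as the StackHandlerConstants asserts
// below spell out.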
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == JS_ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top JS_ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to the next handler past the top JS_ENTRY
  // handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (emit_debug_code()) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check that the context is a global context.
  if (emit_debug_code()) {
    cmp(holder_reg, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h.
//
// Note: r0 will contain the hash code.
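//
// For reference, the instruction sequence below mirrors this C++ (a sketch;
// see ComputeIntegerHash in utils.h for the authoritative version):
//
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);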
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (Serializer::enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary.
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    mov(scratch, Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


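// New-space allocation is a simple bump-pointer scheme.  The fast path, as a
// C++ sketch (names abbreviated; the real top and limit live behind the
// external references used below):
//
//   Address top = *allocation_top;
//   Address new_top = top + object_size;
//   if (/* addition overflowed */ || new_top > *allocation_limit) {
//     goto gc_required;  // Let the caller trigger a GC and retry.
//   }
//   *allocation_top = new_top;
//   result = top;  // Plus kHeapObjectTag when TAG_OBJECT is requested.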
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ASCII cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string object in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate ASCII sliced string object in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}


// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Destination is incremented by length; source is incremented by length on
// the short-string path, and advanced to the last copied dword boundary on
// the long path (the trailing odd bytes are copied up front).
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(length, Immediate(10));
  j(less_equal, &short_string);

  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done);

  bind(&short_string);
  test(length, length);
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
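

// Worked example for the long-string path above (illustrative): for
// length == 11, the dword covering bytes [7..10] is copied first, ecx
// becomes 11 >> 2 == 2, so rep_movs copies bytes [0..7], and destination
// is then advanced by the remaining 11 & 3 == 3 bytes, ending exactly at
// destination + 11.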


void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}
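

// A typical (illustrative) use of InitializeFieldsWithFiller is clearing
// the in-object field area of a freshly allocated object, with start_offset
// and end_offset delimiting the fields and filler holding a value such as
// undefined_value, so the GC never observes uninitialized memory.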


void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  bit_index += kSmiTagSize + kSmiShiftSize;
  ASSERT(IsPowerOf2(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
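

// Worked example (illustrative, using the ia32 smi constants
// kSmiTagSize == 1 and kSmiShiftSize == 0): for bit_index == 3 the adjusted
// index is 4, so byte_index == 0 and byte_bit_index == 4, and the test_b
// above tests the field's first byte against 0x10.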


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}
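

// These checks rely on the fact that a zero integer result stands for the
// IEEE value -0 only if some operand was negative: e.g. (illustrative) for
// -2 * 0 the integer result is 0 but an operand's sign bit is set, so the
// caller should jump to then_label and produce -0.0 instead.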


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    // If a bound function, go to miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in the index register.
  // kArrayIndexValueMask has zeros in the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
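

// Worked example (illustrative): if the array index is cached starting at
// bit String::kHashShift == 2 of the hash field, shifting right by
// kHashShift - kSmiTagSize == 1 leaves the value shifted left by one bit,
// which with kSmiTag == 0 is exactly its smi encoding.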


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1, kSaveFPRegs);
  CallStub(&ces);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


// If true, a Handle<T> returned by value from a function with the cdecl
// calling convention is returned directly, as the value of its location_
// field, in register eax.
// If false, it is returned as a pointer to a memory region preallocated by
// the caller. A pointer to this region must be passed to the function as an
// implicit first argument.
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif


Operand ApiParameterOperand(int index) {
  return Operand(
      esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
}


void MacroAssembler::PrepareCallApiFunction(int argc) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for, and pass, an out parameter.
    if (emit_debug_code()) {
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    //   n + 1: output slot
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output slot

    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);
    if (emit_debug_code()) {
      mov(Operand(esi, 0), Immediate(0));
    }
  }
}
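

// Worked example (illustrative): with kReturnHandlesDirectly == false,
// PrepareCallApiFunction(2) reserves four slots, stores the output-slot
// pointer at esp[0], and ApiParameterOperand(1) then resolves to
// Operand(esp, 2 * kPointerSize), i.e. the second API argument.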


void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
                                              int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function.
  call(function_address, RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // PrepareCallApiFunction saved pointer to the output slot into
    // callee-save register esi.
    mov(eax, Operand(esi, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, eax);
  j(zero, &empty_handle);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be ecx to
  // follow the calling convention which requires the call type to be
  // in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi by updating the least significant
    // byte.
    mov_b(dst, 1 << kSmiTagSize);
  } else {
    // Set to smi zero by clearing the register.
    xor_(dst, dst);
  }
}
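

// Note (illustrative): with the ia32 smi encoding (kSmiTag == 0,
// kSmiTagSize == 1), writing the byte value 2 into the low byte of ecx
// guarantees a non-zero value whose tag bit is clear -- i.e. some non-zero
// smi -- without touching the upper bytes.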


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(ecx, call_kind);
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  Operand dummy(eax, 0);
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
                 flag, Label::kNear, call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code, rmode));
      SetCallKind(ecx, call_kind);
      call(code, rmode);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code, rmode);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and set up the context.
  LoadHeapObject(edi, function);
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the function's map is the same as the expected cached map.
  int expected_index =
      Context::GetContextMapIndexFromElementsKind(expected_kind);
  cmp(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  int trans_index =
      Context::GetContextMapIndexFromElementsKind(transitioned_kind);
  mov(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
}


void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch, Register map_out) {
  ASSERT(!function_in.is(map_out));
  Label done;
  mov(map_out, FieldOperand(function_in,
                            JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                        FAST_ELEMENTS,
                                        map_out,
                                        scratch,
                                        &done);
  }
  bind(&done);
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
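

// Worked example (illustrative): with kNumSafepointRegisters == 8 on ia32,
// eax (code 0) is pushed first by pushad and therefore sits farthest from
// esp, at index 8 - 0 - 1 == 7, while edi (code 7) is pushed last and sits
// at index 0.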


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    mov(result, Operand::Cell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    push(Operand::Cell(cell));
  } else {
    Push(object);
  }
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
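

// The fallback above is needed because the ret instruction only encodes a
// 16-bit immediate: e.g. (illustrative) dropping 0x10000 bytes cannot be
// expressed as ret 0x10000, so the return address is popped, esp adjusted,
// and the address pushed back before a plain ret.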


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // will not return here
  int3();
}
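

// Worked example for the encoding above (illustrative, using the ia32 smi
// constants kSmiTagMask == 1 and kSmiTag == 0): for msg at address
// 0x08a3c2f5, p0 == 0x08a3c2f4 looks like a valid smi, and the alignment
// difference p1 - p0 == 1 is passed alongside as Smi::FromInt(1) so the
// runtime can reconstruct the real pointer.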


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors,
      FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi);
  mov(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
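

// Worked example (illustrative): for power == 3, scratch holds
// 3 + HeapNumber::kExponentBias == 1026, and shifting it into the exponent
// field (left by kMantissaBits == 52) yields the IEEE-754 bit pattern of
// the double 8.0.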


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
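

// Worked example for the interleaving above (illustrative): lea with
// times_8 computes scratch1 + scratch2 * 8, i.e. scratch1 | scratch2 << 3
// -- the masked type bits cannot overlap, as the ASSERT_EQ checks -- so a
// single cmp against kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)
// tests both strings at once.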


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
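

// Worked example (illustrative): with num_arguments == 2 and a 16-byte
// activation frame alignment, esp is lowered by 12 bytes, rounded down to a
// 16-byte boundary, and the original esp is saved in the word just past the
// two argument slots, where CallCFunction later restores it from.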


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted by kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
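

// Worked example (illustrative): each 32-bit bitmap cell covers 32 words ==
// 128 bytes of the page. For an object at byte offset 0x1000 within its
// page, the word index is 0x1000 >> 2 == 1024, so the cell byte offset is
// (1024 >> 5) * kBytesPerCell == 128 and the bit within that cell is
// 1024 & 31 == 0, giving mask_reg == 1.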


void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (FLAG_debug_code) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // Shift left by one (via add).  May overflow, making the check
    // conservative.
   2700     add(mask_scratch, mask_scratch);
   2701     test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
   2702     j(zero, &ok, Label::kNear);
   2703     int3();
   2704     bind(&ok);
   2705     pop(mask_scratch);
   2706   }
   2707 
   2708   // Value is white.  We check whether it is data that doesn't need scanning.
   2709   // Currently only checks for HeapNumber and non-cons strings.
   2710   Register map = ecx;  // Holds map while checking type.
   2711   Register length = ecx;  // Holds length of object after checking type.
   2712   Label not_heap_number;
   2713   Label is_data_object;
   2714 
   2715   // Check for heap-number
   2716   mov(map, FieldOperand(value, HeapObject::kMapOffset));
   2717   cmp(map, FACTORY->heap_number_map());
   2718   j(not_equal, &not_heap_number, Label::kNear);
   2719   mov(length, Immediate(HeapNumber::kSize));
   2720   jmp(&is_data_object, Label::kNear);
   2721 
   2722   bind(&not_heap_number);
   2723   // Check for strings.
   2724   ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
   2725   ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
   2726   // If it's a string and it's not a cons string then it's an object containing
   2727   // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2.  If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  ASSERT_EQ(SeqAsciiString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqAsciiString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));
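  // A minimal worked example of the size computation above, assuming
  // ia32's kSmiTagSize == 1 and kSmiShiftSize == 0 (a smi is the integer
  // shifted left by one).  For a sequential ASCII string of length 5:
  //   length = 4; smi length = 5 << 1 = 10
  //   imul: 4 * 10 = 40; shr by 2 + 1 + 0 = 3: 40 >> 3 = 5 payload bytes
  //   5 + SeqString::kHeaderSize, rounded up to the object alignment
  // For a two-byte string of length 5 the first factor is 8, giving 10
  // payload bytes before the header and alignment are added.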

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (FLAG_debug_code) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, "Live Bytes Count overflow chunk size");
  }

  bind(&done);
}


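// Walks the prototype chain from the object in eax and jumps to
// call_runtime if a fast enumeration of the object's properties (as in
// for-in) is not possible: if any object on the chain has elements, if a
// map lacks non-empty instance descriptors with an enum cache, or if any
// object but the receiver has a non-empty enum cache.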
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next;
  mov(ecx, eax);
  bind(&next);

  // Check that there are no elements.  Register ecx contains the
  // current JS object we've reached through the prototype chain.
  cmp(FieldOperand(ecx, JSObject::kElementsOffset),
      isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  // Check that instance descriptors are not empty so that we can
  // check for an enum cache.  Leave the map in ebx for the subsequent
  // prototype load.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
  JumpIfSmi(edx, call_runtime);

  // Check that there is an enum cache in the non-empty instance
  // descriptors (edx).  This is the case if the next enumeration
  // index field does not contain a smi.
  mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  JumpIfSmi(edx, call_runtime);

  // For all objects but the receiver, check that the cache is empty.
  Label check_prototype;
  cmp(ecx, eax);
  j(equal, &check_prototype, Label::kNear);
  mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  cmp(edx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  // Load the prototype from the map and loop if non-null.
  bind(&check_prototype);
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32