// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  masm->and_(object, ~Page::kPageAlignmentMask);
  Register page_start = object;

  // Compute the bit address in the remembered set, i.e. the index of the
  // pointer within the page. Reuse 'addr' as pointer_offset.
  masm->sub(addr, Operand(page_start));
  masm->shr(addr, kObjectAlignmentBits);
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(pointer_offset, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Find the length of the large object (FixedArray).
  masm->mov(scratch, Operand(page_start, Page::kObjectStartOffset
                                         + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // The extra remembered set starts right after the large object (a
  // FixedArray), at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                        - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}
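
// To make the mapping concrete: for a store to address 'addr' inside a
// normal (non-large) page, the code above computes
//
//   page_start     = addr & ~Page::kPageAlignmentMask
//   pointer_offset = (addr - page_start) >> kObjectAlignmentBits
//
// and then sets bit 'pointer_offset' in the bitmap that starts at
// page_start + Page::kRSetOffset. A C-style sketch of the same update
// (illustrative only; the real constants live in the page/space headers):
//
//   uintptr_t page_start = addr & ~Page::kPageAlignmentMask;
//   uintptr_t bit = (addr - page_start) >> kObjectAlignmentBits;
//   uint32_t* rset = reinterpret_cast<uint32_t*>(page_start + Page::kRSetOffset);
//   rset[bit / 32] |= 1u << (bit % 32);  // what the single bts does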


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};
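
// For example (illustrative; the register codes are the standard ia32
// numeric codes, eax == 0 ... edi == 7): a stub with object == eax (0),
// addr == ecx (1) and scratch == edx (2) gets the minor key
//
//   ObjectBits::encode(0) | AddressBits::encode(1) | ScratchBits::encode(2)
//     == (0 << 8) | (1 << 4) | 2 == 0x012
//
// so stubs that differ only in register assignment are cached separately.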


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have
  // space for the remembered set bits).
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(value, Operand(object));
    // The mask isn't really an address.  We load it as an external reference
    // in case the size of the new space is different between the snapshot
    // maker and the running system.
    and_(Operand(value), Immediate(ExternalReference::new_space_mask()));
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(value, Operand(object, -new_space_start));
    and_(value, Heap::NewSpaceMask());
    j(equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an
      // offset into an array of words.
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
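
// A sketch of a call site (illustrative; 'kFieldOffset' stands for the
// offset of the written field with the heap-object tag already accounted
// for, and the register choice is arbitrary):
//
//   masm->mov(Operand(receiver, kFieldOffset), val);
//   masm->RecordWrite(receiver, kFieldOffset, val, scratch);
//
// All input registers are clobbered, so callers must reload anything they
// still need afterwards.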


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}
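
// A sketch of a caller (illustrative): prologues branch to a slow path
// when esp has grown past the limit that the runtime maintains:
//
//   Label stack_overflow;
//   masm->StackLimitCheck(&stack_overflow);
//   ...                              // fast path continues here
//   masm->bind(&stack_overflow);     // call into the runtime to handle it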


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the registers to the memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the memory locations to the registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the contents of the memory locations onto the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the contents from the stack to the memory locations.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the stack to the memory locations and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}


void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
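
// The xor_ in Set(Register, ...) above is a size optimization: on ia32
// "xor reg, reg" encodes in 2 bytes while "mov reg, imm32" takes 5. It
// also zeroes the register without depending on its previous value, which
// out-of-order cores recognize as a dependency-breaking idiom.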


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
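
// The returned condition holds only as long as no intervening instruction
// clobbers the flags. A sketch of a caller (illustrative; the register
// choice is arbitrary):
//
//   Label not_a_string;
//   Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
//   masm->j(NegateCondition(is_string), &not_a_string);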


void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
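
// Both paths compare ST(0) with ST(1) and pop both operands. fucomip sets
// EFLAGS directly but is a P6 instruction, so its availability is keyed on
// the CMOV feature bit; the fallback routes the FPU status word through
// eax (fnstsw_ax) and sahf to reach the same flag state on pre-P6 parts.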


void MacroAssembler::AbortIfNotNumber(Register object, const char* msg) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, msg);
  bind(&ok);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
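
// After EnterFrame the internal frame looks like this (derived from the
// pushes above; higher addresses first):
//
//   ebp + 4 : return address
//   ebp + 0 : caller's ebp
//   ebp - 4 : context (esi)
//   ebp - 8 : frame type marker (Smi)
//   ebp - 12: code object, esp points here
//
// LeaveFrame below checks the marker slot and then tears the frame down
// with a single leave instruction.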


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS.  Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}


void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
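
// The resulting four-word handler on the stack (derived from the pushes
// above; esp points at the first entry):
//
//   esp + 0 : next handler (StackHandlerConstants::kNextOffset == 0)
//   esp + 4 : frame pointer (or NULL for JS entry handlers)
//   esp + 8 : state (TRY_CATCH, TRY_FINALLY or ENTRY)
//   esp + 12: return address
//
// PopTryHandler below relies on this layout: it unlinks the next handler
// and then drops the remaining three words.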


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   int save_at_depth,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    mov(Operand(esp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain: traverse the chain from the
  // object and do a map check at each step.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      mov(Operand(esp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map.
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
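
// A sketch of a caller (illustrative; the register assignment and label
// wiring are arbitrary) allocating a fixed-size object and bailing out to
// a runtime call when new space is full:
//
//   Label gc_required;
//   masm->AllocateInNewSpace(HeapNumber::kSize, eax, ebx, no_reg,
//                            &gc_required, TAG_OBJECT);
//   ...                          // eax now holds the tagged object
//   masm->bind(&gc_required);    // fall back to the runtime allocator
//
// AllocateHeapNumber further down is essentially this pattern packaged up.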


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}


void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}
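
// These tests catch the one case integer multiplication cannot represent:
// if the 32-bit product is 0 but an operand was negative, the exact result
// is -0, which JavaScript distinguishes from +0. For example, for -5 * 0
// the result register holds 0 while op holds -5 (sign bit set), so control
// transfers to then_label and the caller can redo the operation in
// floating point.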


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}
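
// The calling convention set up here (eax = argument count, ebx = runtime
// entry) is what CEntryStub expects. A sketch of a call site, mirroring
// the call made in PopHandleScopeHelper further down:
//
//   masm->CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);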


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments,
                                     int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToRuntime(ext);
}


void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  ASSERT_EQ(0, kSmiTag);
  shl(scratch, kSmiTagSize);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
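
// After PushHandleScope the stack holds, from the top: the saved limit
// pointer, the saved next pointer, and the smi-tagged extension count
// (derived from the pushes above). PopHandleScopeHelper below pops them
// in exactly that order, so the two routines must be kept in sync.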


Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);
  // Calling a runtime function messes with registers, so we save and
  // restore any one we're asked not to change.
  if (saved.is_valid()) push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  if (saved.is_valid()) pop(saved);

  bind(&write_back);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  shr(scratch, kSmiTagSize);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}


void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}


Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}


void MacroAssembler::JumpToRuntime(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
   1277 
   1278 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
   1279                                     const ParameterCount& actual,
   1280                                     Handle<Code> code_constant,
   1281                                     const Operand& code_operand,
   1282                                     Label* done,
   1283                                     InvokeFlag flag) {
   1284   bool definitely_matches = false;
   1285   Label invoke;
   1286   if (expected.is_immediate()) {
   1287     ASSERT(actual.is_immediate());
   1288     if (expected.immediate() == actual.immediate()) {
   1289       definitely_matches = true;
   1290     } else {
   1291       mov(eax, actual.immediate());
   1292       const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
   1293       if (expected.immediate() == sentinel) {
   1294         // Don't worry about adapting arguments for builtins that
   1295         // don't want that done. Skip adaption code by making it look
   1296         // like we have a match between expected and actual number of
   1297         // arguments.
   1298         definitely_matches = true;
   1299       } else {
   1300         mov(ebx, expected.immediate());
   1301       }
   1302     }
   1303   } else {
   1304     if (actual.is_immediate()) {
   1305       // Expected is in register, actual is immediate. This is the
   1306       // case when we invoke function values without going through the
   1307       // IC mechanism.
   1308       cmp(expected.reg(), actual.immediate());
   1309       j(equal, &invoke);
   1310       ASSERT(expected.reg().is(ebx));
   1311       mov(eax, actual.immediate());
   1312     } else if (!expected.reg().is(actual.reg())) {
   1313       // Both expected and actual are in (different) registers. This
   1314       // is the case when we invoke functions using call and apply.
   1315       cmp(expected.reg(), Operand(actual.reg()));
   1316       j(equal, &invoke);
   1317       ASSERT(actual.reg().is(eax));
   1318       ASSERT(expected.reg().is(ebx));
   1319     }
   1320   }
   1321 
   1322   if (!definitely_matches) {
   1323     Handle<Code> adaptor =
   1324         Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
   1325     if (!code_constant.is_null()) {
   1326       mov(edx, Immediate(code_constant));
   1327       add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
   1328     } else if (!code_operand.is_reg(edx)) {
   1329       mov(edx, code_operand);
   1330     }
   1331 
   1332     if (flag == CALL_FUNCTION) {
   1333       call(adaptor, RelocInfo::CODE_TARGET);
   1334       jmp(done);
   1335     } else {
   1336       jmp(adaptor, RelocInfo::CODE_TARGET);
   1337     }
   1338     bind(&invoke);
   1339   }
   1340 }
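
// Summary of the register contract set up above (derived from the code,
// recorded here for reference): when the argument counts may differ, the
// ArgumentsAdaptorTrampoline is entered with eax holding the actual count,
// ebx holding the expected count (unless it is the don't-adapt sentinel),
// and edx holding the entry point of the code to invoke.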
   1341 
   1342 
   1343 void MacroAssembler::InvokeCode(const Operand& code,
   1344                                 const ParameterCount& expected,
   1345                                 const ParameterCount& actual,
   1346                                 InvokeFlag flag) {
   1347   Label done;
   1348   InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
   1349   if (flag == CALL_FUNCTION) {
   1350     call(code);
   1351   } else {
   1352     ASSERT(flag == JUMP_FUNCTION);
   1353     jmp(code);
   1354   }
   1355   bind(&done);
   1356 }
   1357 
   1358 
   1359 void MacroAssembler::InvokeCode(Handle<Code> code,
   1360                                 const ParameterCount& expected,
   1361                                 const ParameterCount& actual,
   1362                                 RelocInfo::Mode rmode,
   1363                                 InvokeFlag flag) {
   1364   Label done;
   1365   Operand dummy(eax);
   1366   InvokePrologue(expected, actual, code, dummy, &done, flag);
   1367   if (flag == CALL_FUNCTION) {
   1368     call(code, rmode);
   1369   } else {
   1370     ASSERT(flag == JUMP_FUNCTION);
   1371     jmp(code, rmode);
   1372   }
   1373   bind(&done);
   1374 }
   1375 
   1376 
   1377 void MacroAssembler::InvokeFunction(Register fun,
   1378                                     const ParameterCount& actual,
   1379                                     InvokeFlag flag) {
   1380   ASSERT(fun.is(edi));
   1381   mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   1382   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   1383   mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
   1384   mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
   1385   lea(edx, FieldOperand(edx, Code::kHeaderSize));
   1386 
   1387   ParameterCount expected(ebx);
   1388   InvokeCode(Operand(edx), expected, actual, flag);
   1389 }
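
// A minimal, hypothetical call-site sketch (not part of V8; the Demo*
// name is invented). Per the ASSERT above, the closure must be in edi.
static void DemoInvokeFunction(MacroAssembler* masm) {
  ParameterCount actual(2);  // The caller has pushed two arguments.
  masm->InvokeFunction(edi, actual, CALL_FUNCTION);
}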
   1390 
   1391 
   1392 void MacroAssembler::InvokeFunction(JSFunction* function,
   1393                                     const ParameterCount& actual,
   1394                                     InvokeFlag flag) {
   1395   ASSERT(function->is_compiled());
   1396   // Get the function and setup the context.
   1397   mov(edi, Immediate(Handle<JSFunction>(function)));
   1398   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   1399 
   1400   // Invoke the cached code.
   1401   Handle<Code> code(function->code());
   1402   ParameterCount expected(function->shared()->formal_parameter_count());
   1403   InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
   1404 }
   1405 
   1406 
   1407 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
   1408   // Calls are not allowed in some stubs.
   1409   ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
   1410 
   1411   // Rely on the assertion to check that the number of provided
   1412   // arguments matches the expected number of arguments. Fake a
   1413   // parameter count to avoid emitting code to do the check.
   1414   ParameterCount expected(0);
   1415   GetBuiltinEntry(edx, id);
   1416   InvokeCode(Operand(edx), expected, expected, flag);
   1417 }
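
// Hypothetical sketch (not part of V8; the Demo* name is invented):
// tail-call the ADD builtin so it returns directly to our caller.
static void DemoInvokeBuiltin(MacroAssembler* masm) {
  masm->InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
}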
   1418 
   1419 
   1420 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
   1421   // Load the JavaScript builtin function from the builtins object.
   1422   mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   1423   mov(edi, FieldOperand(edi, GlobalObject::kBuiltinsOffset));
   1424   int builtins_offset =
   1425       JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
   1426   mov(edi, FieldOperand(edi, builtins_offset));
   1427   // Load the code entry point from the function into the target register.
   1428   mov(target, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   1429   mov(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
   1430   add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
   1431 }
   1432 
   1433 
   1434 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
   1435   if (context_chain_length > 0) {
   1436     // Move up the chain of contexts to the context containing the slot.
   1437     mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
   1438     // Load the function context (which is the incoming, outer context).
   1439     mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
   1440     for (int i = 1; i < context_chain_length; i++) {
   1441       mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
   1442       mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
   1443     }
   1444     // The context may be an intermediate context, not a function context.
   1445     mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
   1446   } else {  // Slot is in the current function context.
   1447     // The context may be an intermediate context, not a function context.
   1448     mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
   1449   }
   1450 }
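
// Hypothetical sketch (not part of V8; the Demo* name is invented): load
// the function context two scope levels out into ecx, e.g. to access a
// slot of an enclosing function.
static void DemoLoadContext(MacroAssembler* masm) {
  masm->LoadContext(ecx, 2);
}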
   1451 
   1452 
   1454 void MacroAssembler::Ret() {
   1455   ret(0);
   1456 }
   1457 
   1458 
   1459 void MacroAssembler::Drop(int stack_elements) {
   1460   if (stack_elements > 0) {
   1461     add(Operand(esp), Immediate(stack_elements * kPointerSize));
   1462   }
   1463 }
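
// Hypothetical sketch (not part of V8; the Demo* name is invented):
// discard two stack slots without materializing them, then return.
static void DemoDropAndReturn(MacroAssembler* masm) {
  masm->Drop(2);
  masm->Ret();
}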
   1464 
   1465 
   1466 void MacroAssembler::Move(Register dst, Handle<Object> value) {
   1467   mov(dst, value);
   1468 }
   1469 
   1470 
   1471 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
   1472   if (FLAG_native_code_counters && counter->Enabled()) {
   1473     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
   1474   }
   1475 }
   1476 
   1477 
   1478 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
   1479   ASSERT(value > 0);
   1480   if (FLAG_native_code_counters && counter->Enabled()) {
   1481     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   1482     if (value == 1) {
   1483       inc(operand);
   1484     } else {
   1485       add(operand, Immediate(value));
   1486     }
   1487   }
   1488 }
   1489 
   1490 
   1491 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
   1492   ASSERT(value > 0);
   1493   if (FLAG_native_code_counters && counter->Enabled()) {
   1494     Operand operand = Operand::StaticVariable(ExternalReference(counter));
   1495     if (value == 1) {
   1496       dec(operand);
   1497     } else {
   1498       sub(operand, Immediate(value));
   1499     }
   1500   }
   1501 }
   1502 
   1503 
   1504 void MacroAssembler::IncrementCounter(Condition cc,
   1505                                       StatsCounter* counter,
   1506                                       int value) {
   1507   ASSERT(value > 0);
   1508   if (FLAG_native_code_counters && counter->Enabled()) {
   1509     Label skip;
   1510     j(NegateCondition(cc), &skip);
   1511     pushfd();
   1512     IncrementCounter(counter, value);
   1513     popfd();
   1514     bind(&skip);
   1515   }
   1516 }
   1517 
   1518 
   1519 void MacroAssembler::DecrementCounter(Condition cc,
   1520                                       StatsCounter* counter,
   1521                                       int value) {
   1522   ASSERT(value > 0);
   1523   if (FLAG_native_code_counters && counter->Enabled()) {
   1524     Label skip;
   1525     j(NegateCondition(cc), &skip);
   1526     pushfd();
   1527     DecrementCounter(counter, value);
   1528     popfd();
   1529     bind(&skip);
   1530   }
   1531 }
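
// Hypothetical sketch (not part of V8; the Demo* name is invented): bump a
// counter only when the zero flag is set. The conditional variants wrap the
// update in pushfd/popfd, so EFLAGS survive for later branches.
static void DemoConditionalCounter(MacroAssembler* masm,
                                   StatsCounter* counter) {
  masm->IncrementCounter(zero, counter, 1);
}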
   1532 
   1533 
   1534 void MacroAssembler::Assert(Condition cc, const char* msg) {
   1535   if (FLAG_debug_code) Check(cc, msg);
   1536 }
   1537 
   1538 
   1539 void MacroAssembler::Check(Condition cc, const char* msg) {
   1540   Label L;
   1541   j(cc, &L, taken);
   1542   Abort(msg);
   1543   // Will not return here.
   1544   bind(&L);
   1545 }
   1546 
   1547 
   1548 void MacroAssembler::Abort(const char* msg) {
   1549   // We want to pass the msg string as if it were a smi to avoid GC
   1550   // problems; however, msg is not guaranteed to be properly
   1551   // aligned. Instead, we pass an aligned pointer that is a proper
   1552   // v8 smi, and also pass the alignment difference from the real
   1553   // pointer as a smi.
   1554   intptr_t p1 = reinterpret_cast<intptr_t>(msg);
   1555   intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
   1556   ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
   1557 #ifdef DEBUG
   1558   if (msg != NULL) {
   1559     RecordComment("Abort message: ");
   1560     RecordComment(msg);
   1561   }
   1562 #endif
   1563   // Disable stub call restrictions to always allow calls to abort.
   1564   set_allow_stub_calls(true);
   1565 
   1566   push(eax);
   1567   push(Immediate(p0));
   1568   push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
   1569   CallRuntime(Runtime::kAbort, 2);
   1570   // Will not return here.
   1571   int3();
   1572 }
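
// Hypothetical sketch (not part of V8; the Demo* name is invented): assert
// that eax holds a smi. With --debug-code this emits the check and aborts
// on failure; otherwise Assert emits nothing.
static void DemoAssertSmi(MacroAssembler* masm) {
  masm->test(eax, Immediate(kSmiTagMask));
  masm->Assert(zero, "operand is not a smi");
}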
   1573 
   1574 
   1575 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
   1576     Register instance_type,
   1577     Register scratch,
   1578     Label* failure) {
   1579   if (!scratch.is(instance_type)) {
   1580     mov(scratch, instance_type);
   1581   }
   1582   and_(scratch,
   1583        kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
   1584   cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
   1585   j(not_equal, failure);
   1586 }
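
// Hypothetical sketch (not part of V8; the Demo* name is invented),
// mirroring the load sequence used elsewhere in this file: classify the
// string in eax and bail to |slow| unless it is flat sequential ASCII.
static void DemoSequentialAsciiCheck(MacroAssembler* masm, Label* slow) {
  masm->mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  masm->movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  masm->JumpIfInstanceTypeIsNotSequentialAscii(ecx, ecx, slow);
}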
   1587 
   1588 
   1589 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
   1590                                                          Register object2,
   1591                                                          Register scratch1,
   1592                                                          Register scratch2,
   1593                                                          Label* failure) {
   1594   // Check that both objects are not smis.
   1595   ASSERT_EQ(0, kSmiTag);
   1596   mov(scratch1, Operand(object1));
   1597   and_(scratch1, Operand(object2));
   1598   test(scratch1, Immediate(kSmiTagMask));
   1599   j(zero, failure);
   1600 
   1601   // Load instance type for both strings.
   1602   mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
   1603   mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
   1604   movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
   1605   movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
   1606 
   1607   // Check that both are flat ascii strings.
   1608   const int kFlatAsciiStringMask =
   1609       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
   1610   const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
   1611   // Interleave bits from both instance types and compare them in one check.
   1612   ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
   1613   and_(scratch1, kFlatAsciiStringMask);
   1614   and_(scratch2, kFlatAsciiStringMask);
   1615   lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
   1616   cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
   1617   j(not_equal, failure);
   1618 }
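
// How the interleaving above works: the ASSERT_EQ guarantees that
// kFlatAsciiStringMask and the same mask shifted left by three share no
// bits. The lea therefore computes scratch1 + scratch2 * 8, which is
// effectively a bitwise OR of the two masked instance types, so a single
// cmp against the doubled tag checks both strings at once.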
   1619 
   1620 
   1621 CodePatcher::CodePatcher(byte* address, int size)
   1622     : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
   1623   // Create a new macro assembler pointing to the address of the code to patch.
   1624   // The size is adjusted with kGap in order for the assembler to generate size
   1625   // bytes of instructions without failing with buffer size constraints.
   1626   ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
   1627 }
   1628 
   1629 
   1630 CodePatcher::~CodePatcher() {
   1631   // Indicate that code has changed.
   1632   CPU::FlushICache(address_, size_);
   1633 
   1634   // Check that the code was patched as expected.
   1635   ASSERT(masm_.pc_ == address_ + size_);
   1636   ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
   1637 }
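
// Hypothetical sketch (not part of V8; the Demo* name is invented, and
// the masm() accessor is assumed): patch two bytes at |pc| with nops.
// The instruction cache flush and the size checks happen in the
// CodePatcher destructor when |patcher| goes out of scope.
static void DemoPatchWithNops(byte* pc) {
  CodePatcher patcher(pc, 2);
  patcher.masm()->nop();
  patcher.masm()->nop();
}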
   1638 
   1639 
   1640 } }  // namespace v8::internal
   1641