// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute the region number covering addr. See the
  // Page::GetRegionNumberForAddress method for details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
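
// Illustrative note (not in the original source): this is the region-based
// write barrier. Assuming, purely for the sake of a concrete example, 8KB
// pages and Page::kRegionSizeLog2 == 8 (the real values live in spaces.h):
//
//   object = 0x08043abc  ->  object & ~kPageAlignmentMask  == 0x08042000
//   addr   = 0x08043b10  ->  addr & kPageAlignmentMask     == 0x1b10
//                            0x1b10 >> kRegionSizeLog2     == region 27
//
// The bts then sets bit 27 in the page's dirty-region bitmap at
// Page::kDirtyFlagOffset; the GC scans these marks to find old-space
// regions that may contain pointers into new space.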


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  NearLabel done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
    // into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
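
// Illustrative note (not in the original source): in the offset == 0 case
// above, scratch holds a smi array index. A smi on ia32 is the integer
// shifted left by one bit (kSmiTagSize == 1, kSmiTag == 0), so scaling it
// by times_half_pointer_size (x2) yields index * 4, a byte offset into an
// array of 32-bit words: e.g. index 3 is the smi 6, and 6 * 2 == 12 ==
// 3 * kPointerSize.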


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (x.rmode_ != RelocInfo::NONE) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
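
// Sketch of the masking scheme used by SafeSet/SafePush above: wide
// immediates would otherwise let an attacker place chosen byte sequences
// in executable memory (JIT spraying). The value is emitted XOR-ed with a
// per-VM cookie and repaired by a second XOR, relying on
// (x ^ cookie) ^ cookie == x. For example, with x == 0x41414141 and a
// cookie of 0xdeadbeef the code stream contains only 0x9fecffae, never x.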


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}


void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
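
// Note on the two FCmp paths above: fucomip (available on the same CPUs
// that report the CMOV feature) compares ST(0) with ST(1) and sets
// ZF/PF/CF in EFLAGS directly, so only the remaining stack slot needs to
// be freed. On older CPUs the result has to be routed through eax:
// fucompp compares and pops both operands, fnstsw_ax copies the FPU
// status word into ax, and sahf transfers ah into the flags; eax is
// preserved around the sequence with push/pop.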


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
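
// Resulting frame layout (a sketch; the authoritative offsets are in
// StandardFrameConstants):
//
//   ebp + 4: return address
//   ebp + 0: caller's ebp
//   ebp - 4: context (esi)
//   ebp - 8: frame type marker (Smi), re-checked in LeaveFrame below
//   ebp -12: code object
//
// The debug check above catches the case where the code object slot still
// holds the undefined placeholder instead of a patched code object.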


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  ExternalReference context_address(Isolate::k_context_address,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(Operand(esp), Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(Operand(esp), Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
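
// Illustrative note: 'and_(esp, -kFrameAlignment)' rounds esp down to a
// multiple of the alignment, since -A == ~(A - 1) for a power of two A.
// E.g. with kFrameAlignment == 16 and esp == 0x0012ff5c the and_ yields
// 0x0012ff50, wasting at most kFrameAlignment - 1 bytes of stack.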


void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}

void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::k_context_address, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, Operand(ebp));
  pop(ebp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                 isolate())));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                isolate())),
      esp);
}
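
// Resulting stack handler layout (implied by the STATIC_ASSERTs in Throw
// and ThrowUncatchable below):
//
//   esp + 12: pc (the return address already on the stack on entry)
//   esp +  8: state (TRY_CATCH, TRY_FINALLY or ENTRY)
//   esp +  4: ebp (NULL for JS entry frames)
//   esp +  0: next handler (previous value of Isolate::k_handler_address)
//
// Throw unwinds simply by moving esp to the innermost handler and popping
// these fields back off.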


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                isolate())));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // eax must hold the exception.
  if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop the sp to the top of the handler.
  ExternalReference handler_address(Isolate::k_handler_address,
                                    isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Restore next handler and frame pointer, discard handler state.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(Operand::StaticVariable(handler_address));
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  pop(ebp);
  pop(edx);  // Remove state.

  // Before returning we restore the context from the frame pointer if
  // not NULL.  The frame pointer is NULL in the exception handler of
  // a JS entry frame.
  Set(esi, Immediate(0));  // Tentatively set context pointer to NULL.
  NearLabel skip;
  cmp(ebp, 0);
  j(equal, &skip, not_taken);
  mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  bind(&skip);

  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  ret(0);
}


void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // eax must hold the exception.
  if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop sp to the top stack handler.
  ExternalReference handler_address(Isolate::k_handler_address,
                                    isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the ENTRY handler is found.
  NearLabel loop, done;
  bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
  j(equal, &done);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  mov(esp, Operand(esp, kNextOffset));
  jmp(&loop);
  bind(&done);

  // Set the top handler address to the next handler past the current
  // ENTRY handler.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(Operand::StaticVariable(handler_address));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(
        Isolate::k_external_caught_exception_address,
        isolate());
    mov(eax, false);
    mov(Operand::StaticVariable(external_caught), eax);

    // Set pending exception and eax to out of memory exception.
    ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                        isolate());
    mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
    mov(Operand::StaticVariable(pending_exception), eax);
  }

  // Clear the context pointer.
  Set(esi, Immediate(0));

  // Restore fp from handler and discard handler state.
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  pop(ebp);
  pop(edx);  // State.

  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  ret(0);
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    cmp(holder_reg, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(Operand(top_reg), Immediate(object_size));
  j(carry, gc_required, not_taken);
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
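
// The fast path above is a bump-pointer allocation: load the current
// new-space top, advance it by object_size, compare against the limit and
// write it back; gc_required is only taken when the add carries or the
// limit is exceeded. Tagging adds kHeapObjectTag (1) so the result looks
// like a tagged heap object pointer; note that the single-register variant
// (top_reg.is(result)) must subtract the size back out to recover the
// object's start address.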


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, Operand(result));
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  j(carry, gc_required, not_taken);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
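
// Illustrative note on the size computation above: the lea forms
// length * 2 + kObjectAlignmentMask in a single instruction (base plus
// index with times_1 scale plus displacement), and the and_ then rounds
// down to object alignment. Assuming 4-byte object alignment (mask 3),
// length == 5 gives (10 + 3) & ~3 == 12 bytes of character storage, to
// which AllocateInNewSpace adds SeqTwoByteString::kHeaderSize.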


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}


// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to clear the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(Operand(length), Immediate(10));
  j(less_equal, &short_string);

  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(Operand(scratch), Immediate(0x3));
  add(destination, Operand(scratch));
  jmp(&done);

  bind(&short_string);
  test(length, Operand(length));
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
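
// How the long-copy path above stays correct: the first two movs copy the
// final, possibly misaligned, four bytes of the range up front; rep_movs
// then copies length / 4 whole dwords from the start, advancing esi and
// edi by 4 * (length / 4). Adding length & 3 leaves destination exactly
// 'length' past its starting value (only destination is corrected this
// way); the 1-3 trailing bytes rep_movs missed were already written by
// the initial dword copy.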


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key.  kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
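
// Worked sketch of IndexFromHash: the array index is cached in the hash
// field starting at bit String::kHashShift. Masking with
// kArrayIndexValueMask isolates it, and shifting right by only
// kHashShift - kSmiTagSize leaves the value shifted left by one bit,
// which (with kSmiTagSize == 1 and kSmiTag == 0) is exactly its smi
// encoding, so no separate SmiTag step is needed.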


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}


MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return isolate()->heap()->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  return TryJumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(
      ExternalReference(fid, isolate()), num_arguments, result_size);
}

// If true, a Handle<T> returned by value from a function with cdecl calling
// convention will be returned directly as the value of the location_ field
// in register eax.
// If false, it is returned as a pointer to a memory region preallocated by
// the caller. A pointer to this region should be passed to the function as
// an implicit first argument.
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif


Operand ApiParameterOperand(int index) {
  return Operand(
      esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
}
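
// Illustrative note: when kReturnHandlesDirectly is false, slot 0 of the
// outgoing argument area is reserved for the hidden pointer to the
// return-value cell (see PrepareCallApiFunction below), so
// ApiParameterOperand shifts every explicit argument up by one slot; when
// handles come back directly in eax, argument i simply lives at
// esp + i * kPointerSize.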


void MacroAssembler::PrepareCallApiFunction(int argc, Register scratch) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    //   n + 1: output cell
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output cell
    //
    // Note that this is one more "argument" than the function expects,
    // so the out cell will have to be popped explicitly after returning
    // from the function. The out cell contains a Handle.
   1352 
   1353     // pointer to out cell.
   1354     lea(scratch, Operand(esp, (argc + 1) * kPointerSize));
   1355     mov(Operand(esp, 0 * kPointerSize), scratch);  // output.
   1356     if (emit_debug_code()) {
   1357       mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0));  // out cell.
   1358     }
   1359   }
   1360 }
   1361 
   1362 
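// Calls an API function, managing the surrounding HandleScope: the scope
// data (next, limit, level) is saved in ebx/edi before the call and restored
// afterwards, the result handle is dereferenced, scheduled exceptions are
// promoted, and any handle-scope extensions allocated by the callee are
// deleted. Clobbers ebx and edi.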
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
                                                         int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-saved registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the API function.
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    mov(eax, Operand(eax, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle, not_taken);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // the previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles, not_taken);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception, not_taken);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  MaybeObject* result =
      TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  if (result->IsFailure()) {
    return result;
  }
  bind(&empty_handle);
  // The handle held 0; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  // The HandleScope limit has changed. Delete the allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);

  return result;
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  return TryTailCallStub(&ces);
}


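// Shared argument-count check for the Invoke* family below. Broadly, the
// actual argument count travels in eax and the expected count in ebx;
// unless the two are known to match at code-generation time (or the callee
// opts out via kDontAdaptArgumentsSentinel), mismatches are routed through
// the ArgumentsAdaptorTrampoline builtin, which expects the code object to
// invoke in edx.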
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    NearLabel* done,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it
        // look like we have a match between the expected and actual
        // number of arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      if (post_call_generator != NULL) post_call_generator->Generate();
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  NearLabel done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, flag, post_call_generator);
  if (flag == CALL_FUNCTION) {
    call(code);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  NearLabel done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done,
                 flag, post_call_generator);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, post_call_generator);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
               expected, actual, flag, post_call_generator);
  } else {
    Handle<Code> code(function->code());
    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
               flag, post_call_generator);
  }
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   PostCallGenerator* post_call_generator) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, post_call_generator);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a 'with' context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
    Check(equal, "Yo dawg, I heard you liked function contexts "
                 "so I put function contexts in all your contexts");
  }
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


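// For example, with the eight registers saved by pushad, eax (code 0) is
// pushed first and therefore ends up at index 7, the slot farthest from
// esp, while edi (code 7) ends up at index 0.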
int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that the lowest encodings are furthest away from
  // the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}


void MacroAssembler::Ret() {
  ret(0);
}


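// The immediate of the ret instruction is only 16 bits wide, so larger
// amounts of stack to drop are handled by moving the return address past
// the arguments by hand.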
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(Operand(esp), Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(Operand(esp), Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


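// The conditional counter updates below preserve the caller's flags: the
// inc/add/dec/sub emitted by the unconditional versions clobber EFLAGS, so
// the pushfd/popfd pair keeps the original flags visible to code following
// the update.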
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


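// For example (addresses illustrative only): on ia32, where kSmiTagMask is 1
// and kSmiTag is 0, a msg pointer of 0x08a3f001 yields p0 == 0x08a3f000,
// which is a valid smi, and the alignment difference p1 - p0 == 1 is passed
// alongside it as Smi::FromInt(1).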
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::JumpIfNotNumber(Register reg,
                                     TypeInfo info,
                                     Label* on_not_number) {
  if (emit_debug_code()) AbortIfSmi(reg);
  if (!info.IsNumber()) {
    cmp(FieldOperand(reg, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    j(not_equal, on_not_number);
  }
}


void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (emit_debug_code()) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
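    // cvttsd2si produces 0x80000000 (the x86 "integer indefinite" value)
    // when the input is NaN or does not fit in a signed 32-bit integer, so
    // that bit pattern signals failure here. This also conservatively
    // rejects the genuine value -2^31, which shares the encoding.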
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      pop(dst);
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}


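// For example, power == 3 gives a biased exponent of 3 + 1023 == 1026;
// shifted left by the 52 mantissa bits, this forms the IEEE 754 bit
// pattern of the double 8.0.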
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
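  // ANDing the two tagged pointers together: the result's low bit is clear
  // iff at least one operand has a clear smi tag, so a single test catches
  // a smi in either position.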
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load the instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}


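// Reserves stack space for the arguments of a C call. When the OS requires
// activation-frame alignment, esp is aligned as well, and the original
// stack pointer is stashed in the word just above the argument slots so
// that CallCFunction can restore it.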
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make the stack end at the alignment boundary and make room for
    // num_arguments words and the original value of esp.
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is OK as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check the stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the original esp saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}


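// A small usage sketch (assuming the masm() accessor declared for this
// class in the header): construct a CodePatcher over the instruction bytes
// to rewrite, emit exactly `size` bytes of replacement code through
// patcher.masm(), and let the destructor flush the instruction cache and
// verify the byte count.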
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted by kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that the code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32