// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "x64/lithium-codegen-x64.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// When invoking builtins, we need to record the safepoint in the middle of
// the invoke instruction sequence generated by the macro assembler.
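// BeforeCall pads the code stream so that consecutive safepoints stay at
// least kMinSafepointSize apart, leaving room to patch a short call or jump
// at the previous safepoint without overlapping this call.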
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void BeforeCall(int call_size) {
    ASSERT(call_size >= 0);
    // Ensure that we have enough space after the previous safepoint position
    // for the jump generated there.
    int call_end = codegen_->masm()->pc_offset() + call_size;
    int prev_jump_end = codegen_->LastSafepointEnd() + kMinSafepointSize;
    if (call_end < prev_jump_end) {
      int padding_size = prev_jump_end - call_end;
      STATIC_ASSERT(kMinSafepointSize <= 9);  // One multibyte nop is enough.
      codegen_->masm()->nop(padding_size);
    }
  }

  virtual void AfterCall() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  static const int kMinSafepointSize =
      MacroAssembler::kShortCallInstructionLength;
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};

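// V8 convention: '__' abbreviates the macro assembler, so '__ movq(rax, rbx)'
// expands to 'masm()->movq(rax, rbx)'.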
#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateJumpTable() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  int length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}

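// The prologue builds the standard optimized-frame layout (assuming
// kPointerSize == 8):
//   rbp + 0  : caller's saved rbp
//   rbp - 8  : context (rsi)
//   rbp - 16 : JS function (rdi)
// followed by StackSlotCount() spill slots.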
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ Set(rax, slots);
      __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
      Label loop;
      __ bind(&loop);
      __ push(kScratchRegister);
      __ decl(rax);
      __ j(not_zero, &loop);
    } else {
      __ subq(rsp, Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On Windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ movq(Operand(rsp, offset), rax);
      }
#endif
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both rax and rsi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in rsi.
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(slot->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use a third register to avoid
        // clobbering rsi.
        __ movq(rcx, rsi);
        __ RecordWrite(rcx, context_offset, rax, rbx);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateJumpTable() {
  for (int i = 0; i < jump_table_.length(); i++) {
    __ bind(&jump_table_[i].label);
    __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  // Ensure that there is space at the end of the code to write a number
  // of jump instructions, as well as room for writing a call near the end
  // of the code.
  // The jumps are used when there isn't room in the code stream to write
  // a long call instruction; instead, a shorter call to a jump instruction
  // in the same code object is written.
  // The calls are used when lazily deoptimizing a function, and they call
  // a deoptimization function.
  int short_deopts = safepoints_.CountShortDeoptimizationIntervals(
      static_cast<unsigned>(MacroAssembler::kJumpInstructionLength));
  int byte_count = short_deopts * MacroAssembler::kJumpInstructionLength;
  while (byte_count-- > 0) {
    __ int3();
  }
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
      chunk_->LookupLiteralRepresentation(op).IsInteger32();
}


bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
      chunk_->LookupLiteralRepresentation(op).IsTagged();
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  Handle<Object> literal = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
  return literal;
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  // Does not handle registers. In X64 assembler, plain registers are not
  // representable as an Operand.
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
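    // For example, with kPointerSize == 8, slot 0 maps to Operand(rbp, -24),
    // just below the saved rbp, context, and function.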
    return Operand(rbp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(rbp, -(index - 1) * kPointerSize);
  }
}


void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode,
                               int argc) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ call(code, mode);
  RegisterLazyDeoptimization(instr, safepoint_mode, argc);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0);
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0);
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
                                          SafepointMode safepoint_mode,
                                          int argc) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution could
  // resume from a previous bailout point and repeat the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    ASSERT(argc == 0);
    RecordSafepoint(instr->pointer_map(),
                    deoptimization_environment->deoptimization_index());
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
    RecordSafepointWithRegisters(
        instr->pointer_map(),
        argc,
        deoptimization_environment->deoptimization_index());
  }
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  if (cc == no_condition) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    // We often have several deopts to the same entry; reuse the last
    // jump entry if this is the case.
    if (jump_table_.is_empty() ||
        jump_table_.last().address != entry) {
      jump_table_.Add(JumpTableEntry(entry));
    }
    __ j(cc, &jump_table_.last().label);
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  ASSERT(kind == expected_safepoint_kind_);

  const ZoneList<LOperand*>* operands = pointers->operands();

  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register rsi always contains a pointer to the context.
    safepoint.DefinePointerRegister(rsi);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}


void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
      deoptimization_index);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
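  // If the next instruction is a lazy bailout, its safepoint must describe
  // the state after this gap's moves, so record the current pc for it.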
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    if (divisor < 0) divisor = -divisor;

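    // For a power-of-two divisor, x % divisor is computed with a mask:
    // negative dividends are negated first so the remainder keeps the sign
    // of the dividend (truncating-division semantics), then negated back.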
    NearLabel positive_dividend, done;
    __ testl(dividend, dividend);
    __ j(not_sign, &positive_dividend);
    __ negl(dividend);
    __ andl(dividend, Immediate(divisor - 1));
    __ negl(dividend);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ j(not_zero, &done);
      DeoptimizeIf(no_condition, instr->environment());
    } else {
      __ jmp(&done);
    }
    __ bind(&positive_dividend);
    __ andl(dividend, Immediate(divisor - 1));
    __ bind(&done);
  } else {
    LOperand* right = instr->InputAt(1);
    Register right_reg = ToRegister(right);

    ASSERT(ToRegister(instr->result()).is(rdx));
    ASSERT(ToRegister(instr->InputAt(0)).is(rax));
    ASSERT(!right_reg.is(rax));
    ASSERT(!right_reg.is(rdx));

    // Check for x % 0.
    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
      __ testl(right_reg, right_reg);
      DeoptimizeIf(zero, instr->environment());
    }

    // Sign extend eax to edx.
    // (We are using only the low 32 bits of the values.)
    __ cdq();

    // Check for (0 % -x) that will produce negative zero.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      NearLabel positive_left;
      NearLabel done;
      __ testl(rax, rax);
      __ j(not_sign, &positive_left);
      __ idivl(right_reg);

      // Test the remainder for 0: if it is 0, the result would be -0.
      __ testl(rdx, rdx);
      __ j(not_zero, &done);

      DeoptimizeIf(no_condition, instr->environment());
      __ bind(&positive_left);
      __ idivl(right_reg);
      __ bind(&done);
    } else {
      __ idivl(right_reg);
    }
  }
}


void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(rax));
  ASSERT(ToRegister(instr->InputAt(0)).is(rax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));

  Register left_reg = rax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ testl(right_reg, right_reg);
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel left_not_zero;
    __ testl(left_reg, left_reg);
    __ j(not_zero, &left_not_zero);
    __ testl(right_reg, right_reg);
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    NearLabel left_not_min_int;
    __ cmpl(left_reg, Immediate(kMinInt));
    __ j(not_zero, &left_not_min_int);
    __ cmpl(right_reg, Immediate(-1));
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to rdx.
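  // (idivl divides the 64-bit value edx:eax by the operand, so edx must hold
  // the sign extension of eax, which cdq provides.)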
  __ cdq();
  __ idivl(right_reg);

  // Deoptimize if remainder is not 0.
  __ testl(rdx, rdx);
  DeoptimizeIf(not_zero, instr->environment());
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ movl(kScratchRegister, left);
  }

  bool can_overflow =
      instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  if (right->IsConstantOperand()) {
    int right_value = ToInteger32(LConstantOperand::cast(right));
    if (right_value == -1) {
      __ negl(left);
    } else if (right_value == 0) {
      __ xorl(left, left);
    } else if (right_value == 2) {
      __ addl(left, left);
    } else if (!can_overflow) {
      // If the multiplication is known to not overflow, we
      // can use operations that don't set the overflow flag
      // correctly.
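      // (lea computes left * 3, * 5, and * 9 as scaled-index additions and
      // shifts handle powers of two; neither reports overflow.)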
      switch (right_value) {
        case 1:
          // Do nothing.
          break;
        case 3:
          __ leal(left, Operand(left, left, times_2, 0));
          break;
        case 4:
          __ shll(left, Immediate(2));
          break;
        case 5:
          __ leal(left, Operand(left, left, times_4, 0));
          break;
        case 8:
          __ shll(left, Immediate(3));
          break;
        case 9:
          __ leal(left, Operand(left, left, times_8, 0));
          break;
        case 16:
          __ shll(left, Immediate(4));
          break;
        default:
          __ imull(left, left, Immediate(right_value));
          break;
      }
    } else {
      __ imull(left, left, Immediate(right_value));
    }
  } else if (right->IsStackSlot()) {
    __ imull(left, ToOperand(right));
  } else {
    __ imull(left, ToRegister(right));
  }

  if (can_overflow) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    NearLabel done;
    __ testl(left, left);
    __ j(not_zero, &done);
    if (right->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else if (right->IsStackSlot()) {
      __ or_(kScratchRegister, ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(kScratchRegister, ToRegister(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), Immediate(right_operand));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else if (right->IsStackSlot()) {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    ASSERT(right->IsRegister());
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), ToRegister(right));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), ToRegister(right));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), ToRegister(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(rcx));

    switch (instr->op()) {
      case Token::SAR:
        __ sarl_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shrl_cl(ToRegister(left));
        if (instr->can_deopt()) {
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr->environment());
        }
        break;
      case Token::SHL:
        __ shll_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
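    // (The mask matches both the hardware and JS semantics: 32-bit x86
    // shifts and JS shift operators use only the low five bits of the count.)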
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sarl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr->environment());
        } else {
          __ shrl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shll(ToRegister(left), Immediate(shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ subl(ToRegister(left),
            Immediate(ToInteger32(LConstantOperand::cast(right))));
  } else if (right->IsRegister()) {
    __ subl(ToRegister(left), ToRegister(right));
  } else {
    __ subl(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), instr->value());
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  uint64_t int_val = BitCast<uint64_t, double>(v);
  // Use xor to produce +0.0 in a fast and compact way, but avoid doing
  // so if the constant is -0.0.
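  // (-0.0 has the sign bit set, so its bit pattern is nonzero and it takes
  // the movq path below.)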
  if (int_val == 0) {
    __ xorpd(res, res);
  } else {
    Register tmp = ToRegister(instr->TempAt(0));
    __ Set(tmp, int_val);
    __ movq(res, tmp);
  }
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Move(ToRegister(instr->result()), instr->value());
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(array, FixedArray::kLengthOffset));
}


void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ movl(result, FieldOperand(array, ExternalPixelArray::kLengthOffset));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  NearLabel done;
  // If the object is a smi, return the object.
  __ JumpIfSmi(input, &done);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister);
  __ j(not_equal, &done);
  __ movq(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}


void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToRegister(instr->InputAt(0)));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ addl(ToRegister(left),
            Immediate(ToInteger32(LConstantOperand::cast(right))));
  } else if (right->IsRegister()) {
    __ addl(ToRegister(left), ToRegister(right));
  } else {
    __ addl(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  XMMRegister left = ToDoubleRegister(instr->InputAt(0));
  XMMRegister right = ToDoubleRegister(instr->InputAt(1));
  XMMRegister result = ToDoubleRegister(instr->result());
  // All operations except MOD are computed in-place.
  ASSERT(instr->op() == Token::MOD || left.is(result));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(left, right);
      break;
    case Token::SUB:
      __ subsd(left, right);
      break;
    case Token::MUL:
      __ mulsd(left, right);
      break;
    case Token::DIV:
      __ divsd(left, right);
      break;
    case Token::MOD:
      __ PrepareCallCFunction(2);
      __ movsd(xmm0, left);
      ASSERT(right.is(xmm1));
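      // The C helper takes its two double arguments in xmm0 and xmm1 and
      // returns the result in xmm0.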
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
      __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
      __ movsd(result, xmm0);
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
  ASSERT(ToRegister(instr->InputAt(1)).is(rax));
  ASSERT(ToRegister(instr->result()).is(rax));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}

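// Emits a conditional branch; when one target is the next emitted block,
// fall through to it so that at most one jump is generated.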
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    if (cc != always) {
      __ jmp(chunk_->GetAssemblyLabel(right_block));
    }
  }
}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ testl(reg, reg);
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorpd(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->type();
    if (type.IsBoolean()) {
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, equal);
    } else if (type.IsSmi()) {
      __ SmiCompare(reg, Smi::FromInt(0));
      EmitBranch(true_block, false_block, not_equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
      __ j(equal, false_label);
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      __ j(equal, true_label);
      __ CompareRoot(reg, Heap::kFalseValueRootIndex);
      __ j(equal, false_label);
      __ Cmp(reg, Smi::FromInt(0));
      __ j(equal, false_label);
      __ JumpIfSmi(reg, true_label);

      // Test for double values. Plus/minus zero and NaN are false.
      NearLabel call_stub;
      __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
                     Heap::kHeapNumberMapRootIndex);
      __ j(not_equal, &call_stub);

      // HeapNumber => false iff +0, -0, or NaN. These three cases set the
      // zero flag when compared to zero using ucomisd.
      __ xorpd(xmm0, xmm0);
      __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
      __ j(zero, false_label);
      __ jmp(true_label);
      // The conversion stub doesn't cause garbage collections, so it's
      // safe not to record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub;
      __ Pushad();
      __ push(reg);
      __ CallStub(&stub);
      __ testq(rax, rax);
      __ Popad();
      EmitBranch(true_block, false_block, not_zero);
    }
  }
}


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  PushSafepointRegistersScope scope(this);
  CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
}


void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}

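// Maps a comparison token to an x64 condition code. Unsigned conditions are
// also used for double compares: ucomisd sets the flags as if for an
// unsigned integer comparison.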
   1389 inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
   1390   Condition cond = no_condition;
   1391   switch (op) {
   1392     case Token::EQ:
   1393     case Token::EQ_STRICT:
   1394       cond = equal;
   1395       break;
   1396     case Token::LT:
   1397       cond = is_unsigned ? below : less;
   1398       break;
   1399     case Token::GT:
   1400       cond = is_unsigned ? above : greater;
   1401       break;
   1402     case Token::LTE:
   1403       cond = is_unsigned ? below_equal : less_equal;
   1404       break;
   1405     case Token::GTE:
   1406       cond = is_unsigned ? above_equal : greater_equal;
   1407       break;
   1408     case Token::IN:
   1409     case Token::INSTANCEOF:
   1410     default:
   1411       UNREACHABLE();
   1412   }
   1413   return cond;
   1414 }
   1415 
   1416 
   1417 void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
   1418   if (right->IsConstantOperand()) {
   1419     int32_t value = ToInteger32(LConstantOperand::cast(right));
   1420     if (left->IsRegister()) {
   1421       __ cmpl(ToRegister(left), Immediate(value));
   1422     } else {
   1423       __ cmpl(ToOperand(left), Immediate(value));
   1424     }
   1425   } else if (right->IsRegister()) {
   1426     __ cmpl(ToRegister(left), ToRegister(right));
   1427   } else {
   1428     __ cmpl(ToRegister(left), ToOperand(right));
   1429   }
   1430 }
   1431 
   1432 
   1433 void LCodeGen::DoCmpID(LCmpID* instr) {
   1434   LOperand* left = instr->InputAt(0);
   1435   LOperand* right = instr->InputAt(1);
   1436   LOperand* result = instr->result();
   1437 
   1438   NearLabel unordered;
   1439   if (instr->is_double()) {
   1440     // Don't base result on EFLAGS when a NaN is involved. Instead
   1441     // jump to the unordered case, which produces a false value.
   1442     __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
   1443     __ j(parity_even, &unordered);
   1444   } else {
   1445     EmitCmpI(left, right);
   1446   }
   1447 
   1448   NearLabel done;
   1449   Condition cc = TokenToCondition(instr->op(), instr->is_double());
   1450   __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
   1451   __ j(cc, &done);
   1452 
   1453   __ bind(&unordered);
   1454   __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
   1455   __ bind(&done);
   1456 }
   1457 
   1458 
   1459 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
   1460   LOperand* left = instr->InputAt(0);
   1461   LOperand* right = instr->InputAt(1);
   1462   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1463   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1464 
   1465   if (instr->is_double()) {
   1466     // Don't base result on EFLAGS when a NaN is involved. Instead
   1467     // jump to the false block.
   1468     __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
   1469     __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
   1470   } else {
   1471     EmitCmpI(left, right);
   1472   }
   1473 
   1474   Condition cc = TokenToCondition(instr->op(), instr->is_double());
   1475   EmitBranch(true_block, false_block, cc);
   1476 }
   1477 
   1478 
   1479 void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
   1480   Register left = ToRegister(instr->InputAt(0));
   1481   Register right = ToRegister(instr->InputAt(1));
   1482   Register result = ToRegister(instr->result());
   1483 
   1484   NearLabel different, done;
   1485   __ cmpq(left, right);
   1486   __ j(not_equal, &different);
   1487   __ LoadRoot(result, Heap::kTrueValueRootIndex);
   1488   __ jmp(&done);
   1489   __ bind(&different);
   1490   __ LoadRoot(result, Heap::kFalseValueRootIndex);
   1491   __ bind(&done);
   1492 }
   1493 
   1494 
   1495 void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
   1496   Register left = ToRegister(instr->InputAt(0));
   1497   Register right = ToRegister(instr->InputAt(1));
   1498   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1499   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1500 
   1501   __ cmpq(left, right);
   1502   EmitBranch(true_block, false_block, equal);
   1503 }
   1504 
   1505 
   1506 void LCodeGen::DoIsNull(LIsNull* instr) {
   1507   Register reg = ToRegister(instr->InputAt(0));
   1508   Register result = ToRegister(instr->result());
   1509 
   1510   // If the expression is known to be a smi, then it's
   1511   // definitely not null. Materialize false.
   1512   // Consider adding other type and representation tests too.
   1513   if (instr->hydrogen()->value()->type().IsSmi()) {
   1514     __ LoadRoot(result, Heap::kFalseValueRootIndex);
   1515     return;
   1516   }
   1517 
   1518   __ CompareRoot(reg, Heap::kNullValueRootIndex);
   1519   if (instr->is_strict()) {
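             // Store the root-list index of the true value in the result
             // register, replace it with the false value's index if the
             // comparison failed, and load the root at that index.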
   1520     ASSERT(Heap::kTrueValueRootIndex >= 0);
   1521     __ movl(result, Immediate(Heap::kTrueValueRootIndex));
   1522     NearLabel load;
   1523     __ j(equal, &load);
   1524     __ Set(result, Heap::kFalseValueRootIndex);
   1525     __ bind(&load);
   1526     __ LoadRootIndexed(result, result, 0);
   1527   } else {
   1528     NearLabel true_value, false_value, done;
   1529     __ j(equal, &true_value);
   1530     __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
   1531     __ j(equal, &true_value);
   1532     __ JumpIfSmi(reg, &false_value);
   1533     // Check for undetectable objects by looking in the bit field in
   1534     // the map. The object has already been smi checked.
   1535     Register scratch = result;
   1536     __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
   1537     __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
   1538              Immediate(1 << Map::kIsUndetectable));
   1539     __ j(not_zero, &true_value);
   1540     __ bind(&false_value);
   1541     __ LoadRoot(result, Heap::kFalseValueRootIndex);
   1542     __ jmp(&done);
   1543     __ bind(&true_value);
   1544     __ LoadRoot(result, Heap::kTrueValueRootIndex);
   1545     __ bind(&done);
   1546   }
   1547 }
   1548 
   1549 
   1550 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
   1551   Register reg = ToRegister(instr->InputAt(0));
   1552 
   1553   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1554 
   1555   if (instr->hydrogen()->representation().IsSpecialization() ||
   1556       instr->hydrogen()->type().IsSmi()) {
    1557     // If the expression is known to be untagged or a smi, then it's
    1558     // definitely not null, and it can't be an undetectable object.
   1559     // Jump directly to the false block.
   1560     EmitGoto(false_block);
   1561     return;
   1562   }
   1563 
   1564   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1565 
   1566   __ CompareRoot(reg, Heap::kNullValueRootIndex);
   1567   if (instr->is_strict()) {
   1568     EmitBranch(true_block, false_block, equal);
   1569   } else {
   1570     Label* true_label = chunk_->GetAssemblyLabel(true_block);
   1571     Label* false_label = chunk_->GetAssemblyLabel(false_block);
   1572     __ j(equal, true_label);
   1573     __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
   1574     __ j(equal, true_label);
   1575     __ JumpIfSmi(reg, false_label);
   1576     // Check for undetectable objects by looking in the bit field in
   1577     // the map. The object has already been smi checked.
   1578     Register scratch = ToRegister(instr->TempAt(0));
   1579     __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
   1580     __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
   1581              Immediate(1 << Map::kIsUndetectable));
   1582     EmitBranch(true_block, false_block, not_zero);
   1583   }
   1584 }
   1585 
   1586 
   1587 Condition LCodeGen::EmitIsObject(Register input,
   1588                                  Label* is_not_object,
   1589                                  Label* is_object) {
   1590   ASSERT(!input.is(kScratchRegister));
   1591 
   1592   __ JumpIfSmi(input, is_not_object);
   1593 
   1594   __ CompareRoot(input, Heap::kNullValueRootIndex);
   1595   __ j(equal, is_object);
   1596 
   1597   __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
   1598   // Undetectable objects behave like undefined.
   1599   __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
   1600            Immediate(1 << Map::kIsUndetectable));
   1601   __ j(not_zero, is_not_object);
   1602 
   1603   __ movzxbl(kScratchRegister,
   1604              FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
   1605   __ cmpb(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
   1606   __ j(below, is_not_object);
   1607   __ cmpb(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
   1608   return below_equal;
   1609 }
   1610 
   1611 
   1612 void LCodeGen::DoIsObject(LIsObject* instr) {
   1613   Register reg = ToRegister(instr->InputAt(0));
   1614   Register result = ToRegister(instr->result());
   1615   Label is_false, is_true, done;
   1616 
   1617   Condition true_cond = EmitIsObject(reg, &is_false, &is_true);
   1618   __ j(true_cond, &is_true);
   1619 
   1620   __ bind(&is_false);
   1621   __ LoadRoot(result, Heap::kFalseValueRootIndex);
   1622   __ jmp(&done);
   1623 
   1624   __ bind(&is_true);
   1625   __ LoadRoot(result, Heap::kTrueValueRootIndex);
   1626 
   1627   __ bind(&done);
   1628 }
   1629 
   1630 
   1631 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
   1632   Register reg = ToRegister(instr->InputAt(0));
   1633 
   1634   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1635   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1636   Label* true_label = chunk_->GetAssemblyLabel(true_block);
   1637   Label* false_label = chunk_->GetAssemblyLabel(false_block);
   1638 
   1639   Condition true_cond = EmitIsObject(reg, false_label, true_label);
   1640 
   1641   EmitBranch(true_block, false_block, true_cond);
   1642 }
   1643 
   1644 
   1645 void LCodeGen::DoIsSmi(LIsSmi* instr) {
   1646   LOperand* input_operand = instr->InputAt(0);
   1647   Register result = ToRegister(instr->result());
   1648   if (input_operand->IsRegister()) {
   1649     Register input = ToRegister(input_operand);
   1650     __ CheckSmiToIndicator(result, input);
   1651   } else {
   1652     Operand input = ToOperand(instr->InputAt(0));
   1653     __ CheckSmiToIndicator(result, input);
   1654   }
   1655   // result is zero if input is a smi, and one otherwise.
   1656   ASSERT(Heap::kFalseValueRootIndex == Heap::kTrueValueRootIndex + 1);
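           // The 0/1 indicator selects between the adjacent true and false
           // roots, starting from kTrueValueRootIndex.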
   1657   __ LoadRootIndexed(result, result, Heap::kTrueValueRootIndex);
   1658 }
   1659 
   1660 
   1661 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
   1662   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1663   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1664 
   1665   Condition is_smi;
   1666   if (instr->InputAt(0)->IsRegister()) {
   1667     Register input = ToRegister(instr->InputAt(0));
   1668     is_smi = masm()->CheckSmi(input);
   1669   } else {
   1670     Operand input = ToOperand(instr->InputAt(0));
   1671     is_smi = masm()->CheckSmi(input);
   1672   }
   1673   EmitBranch(true_block, false_block, is_smi);
   1674 }
   1675 
   1676 
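         // HHasInstanceType encodes either an exact instance type or a range
         // bounded by FIRST_TYPE or LAST_TYPE, so a single boundary comparison
         // with the condition from BranchCondition below is sufficient.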
   1677 static InstanceType TestType(HHasInstanceType* instr) {
   1678   InstanceType from = instr->from();
   1679   InstanceType to = instr->to();
   1680   if (from == FIRST_TYPE) return to;
   1681   ASSERT(from == to || to == LAST_TYPE);
   1682   return from;
   1683 }
   1684 
   1685 
   1686 static Condition BranchCondition(HHasInstanceType* instr) {
   1687   InstanceType from = instr->from();
   1688   InstanceType to = instr->to();
   1689   if (from == to) return equal;
   1690   if (to == LAST_TYPE) return above_equal;
   1691   if (from == FIRST_TYPE) return below_equal;
   1692   UNREACHABLE();
   1693   return equal;
   1694 }
   1695 
   1696 
   1697 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
   1698   Register input = ToRegister(instr->InputAt(0));
   1699   Register result = ToRegister(instr->result());
   1700 
   1701   ASSERT(instr->hydrogen()->value()->representation().IsTagged());
   1702   __ testl(input, Immediate(kSmiTagMask));
   1703   NearLabel done, is_false;
   1704   __ j(zero, &is_false);
   1705   __ CmpObjectType(input, TestType(instr->hydrogen()), result);
   1706   __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
   1707   __ LoadRoot(result, Heap::kTrueValueRootIndex);
   1708   __ jmp(&done);
   1709   __ bind(&is_false);
   1710   __ LoadRoot(result, Heap::kFalseValueRootIndex);
   1711   __ bind(&done);
   1712 }
   1713 
   1714 
   1715 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
   1716   Register input = ToRegister(instr->InputAt(0));
   1717 
   1718   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1719   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1720 
   1721   Label* false_label = chunk_->GetAssemblyLabel(false_block);
   1722 
   1723   __ JumpIfSmi(input, false_label);
   1724 
   1725   __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
   1726   EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
   1727 }
   1728 
   1729 
   1730 void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
   1731   Register input = ToRegister(instr->InputAt(0));
   1732   Register result = ToRegister(instr->result());
   1733 
   1734   if (FLAG_debug_code) {
   1735     __ AbortIfNotString(input);
   1736   }
   1737 
   1738   __ movl(result, FieldOperand(input, String::kHashFieldOffset));
   1739   ASSERT(String::kHashShift >= kSmiTagSize);
   1740   __ IndexFromHash(result, result);
   1741 }
   1742 
   1743 
   1744 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
   1745   Register input = ToRegister(instr->InputAt(0));
   1746   Register result = ToRegister(instr->result());
   1747 
   1748   ASSERT(instr->hydrogen()->value()->representation().IsTagged());
   1749   __ LoadRoot(result, Heap::kTrueValueRootIndex);
   1750   __ testl(FieldOperand(input, String::kHashFieldOffset),
   1751            Immediate(String::kContainsCachedArrayIndexMask));
   1752   NearLabel done;
   1753   __ j(zero, &done);
   1754   __ LoadRoot(result, Heap::kFalseValueRootIndex);
   1755   __ bind(&done);
   1756 }
   1757 
   1758 
   1759 void LCodeGen::DoHasCachedArrayIndexAndBranch(
   1760     LHasCachedArrayIndexAndBranch* instr) {
   1761   Register input = ToRegister(instr->InputAt(0));
   1762 
   1763   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1764   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1765 
   1766   __ testl(FieldOperand(input, String::kHashFieldOffset),
   1767            Immediate(String::kContainsCachedArrayIndexMask));
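           // The mask bits are clear exactly when a cached array index is
           // present, so the true block is reached on the zero/equal condition.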
   1768   EmitBranch(true_block, false_block, equal);
   1769 }
   1770 
   1771 
   1772 // Branches to a label or falls through with the answer in the z flag.
   1773 // Trashes the temp register and possibly input (if it and temp are aliased).
   1774 void LCodeGen::EmitClassOfTest(Label* is_true,
   1775                                Label* is_false,
   1776                                Handle<String> class_name,
   1777                                Register input,
   1778                                Register temp) {
   1779   __ JumpIfSmi(input, is_false);
   1780   __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
   1781   __ j(below, is_false);
   1782 
   1783   // Map is now in temp.
   1784   // Functions have class 'Function'.
   1785   __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
   1786   if (class_name->IsEqualTo(CStrVector("Function"))) {
   1787     __ j(equal, is_true);
   1788   } else {
   1789     __ j(equal, is_false);
   1790   }
   1791 
   1792   // Check if the constructor in the map is a function.
   1793   __ movq(temp, FieldOperand(temp, Map::kConstructorOffset));
   1794 
   1795   // As long as JS_FUNCTION_TYPE is the last instance type and it is
   1796   // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
   1797   // LAST_JS_OBJECT_TYPE.
   1798   ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
   1799   ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
   1800 
   1801   // Objects with a non-function constructor have class 'Object'.
   1802   __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
   1803   if (class_name->IsEqualTo(CStrVector("Object"))) {
   1804     __ j(not_equal, is_true);
   1805   } else {
   1806     __ j(not_equal, is_false);
   1807   }
   1808 
   1809   // temp now contains the constructor function. Grab the
   1810   // instance class name from there.
   1811   __ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
   1812   __ movq(temp, FieldOperand(temp,
   1813                              SharedFunctionInfo::kInstanceClassNameOffset));
   1814   // The class name we are testing against is a symbol because it's a literal.
   1815   // The name in the constructor is a symbol because of the way the context is
   1816   // booted.  This routine isn't expected to work for random API-created
   1817   // classes and it doesn't have to because you can't access it with natives
   1818   // syntax.  Since both sides are symbols it is sufficient to use an identity
   1819   // comparison.
   1820   ASSERT(class_name->IsSymbol());
   1821   __ Cmp(temp, class_name);
   1822   // End with the answer in the z flag.
   1823 }
   1824 
   1825 
   1826 void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
   1827   Register input = ToRegister(instr->InputAt(0));
   1828   Register result = ToRegister(instr->result());
   1829   ASSERT(input.is(result));
   1830   Register temp = ToRegister(instr->TempAt(0));
   1831   Handle<String> class_name = instr->hydrogen()->class_name();
   1832   NearLabel done;
   1833   Label is_true, is_false;
   1834 
   1835   EmitClassOfTest(&is_true, &is_false, class_name, input, temp);
   1836 
   1837   __ j(not_equal, &is_false);
   1838 
   1839   __ bind(&is_true);
   1840   __ LoadRoot(result, Heap::kTrueValueRootIndex);
   1841   __ jmp(&done);
   1842 
   1843   __ bind(&is_false);
   1844   __ LoadRoot(result, Heap::kFalseValueRootIndex);
   1845   __ bind(&done);
   1846 }
   1847 
   1848 
   1849 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
   1850   Register input = ToRegister(instr->InputAt(0));
   1851   Register temp = ToRegister(instr->TempAt(0));
   1852   Handle<String> class_name = instr->hydrogen()->class_name();
   1853 
   1854   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1855   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1856 
   1857   Label* true_label = chunk_->GetAssemblyLabel(true_block);
   1858   Label* false_label = chunk_->GetAssemblyLabel(false_block);
   1859 
   1860   EmitClassOfTest(true_label, false_label, class_name, input, temp);
   1861 
   1862   EmitBranch(true_block, false_block, equal);
   1863 }
   1864 
   1865 
   1866 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
   1867   Register reg = ToRegister(instr->InputAt(0));
   1868   int true_block = instr->true_block_id();
   1869   int false_block = instr->false_block_id();
   1870 
   1871   __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
   1872   EmitBranch(true_block, false_block, equal);
   1873 }
   1874 
   1875 
   1876 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   1877   InstanceofStub stub(InstanceofStub::kNoFlags);
   1878   __ push(ToRegister(instr->InputAt(0)));
   1879   __ push(ToRegister(instr->InputAt(1)));
   1880   CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   1881   NearLabel true_value, done;
   1882   __ testq(rax, rax);
   1883   __ j(zero, &true_value);
   1884   __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
   1885   __ jmp(&done);
   1886   __ bind(&true_value);
   1887   __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
   1888   __ bind(&done);
   1889 }
   1890 
   1891 
   1892 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
   1893   int true_block = chunk_->LookupDestination(instr->true_block_id());
   1894   int false_block = chunk_->LookupDestination(instr->false_block_id());
   1895 
   1896   InstanceofStub stub(InstanceofStub::kNoFlags);
   1897   __ push(ToRegister(instr->InputAt(0)));
   1898   __ push(ToRegister(instr->InputAt(1)));
   1899   CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   1900   __ testq(rax, rax);
   1901   EmitBranch(true_block, false_block, zero);
   1902 }
   1903 
   1904 
   1905 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   1906   class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   1907    public:
   1908     DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
   1909                                   LInstanceOfKnownGlobal* instr)
   1910         : LDeferredCode(codegen), instr_(instr) { }
   1911     virtual void Generate() {
   1912       codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
   1913     }
   1914 
   1915     Label* map_check() { return &map_check_; }
   1916 
   1917    private:
   1918     LInstanceOfKnownGlobal* instr_;
   1919     Label map_check_;
   1920   };
   1921 
   1922 
   1923   DeferredInstanceOfKnownGlobal* deferred;
   1924   deferred = new DeferredInstanceOfKnownGlobal(this, instr);
   1925 
   1926   Label done, false_result;
   1927   Register object = ToRegister(instr->InputAt(0));
   1928 
   1929   // A Smi is not an instance of anything.
   1930   __ JumpIfSmi(object, &false_result);
   1931 
    1932   // This is the inlined call site instanceof cache. The two occurrences of the
   1933   // hole value will be patched to the last map/result pair generated by the
   1934   // instanceof stub.
   1935   NearLabel cache_miss;
   1936   // Use a temp register to avoid memory operands with variable lengths.
   1937   Register map = ToRegister(instr->TempAt(0));
   1938   __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
   1939   __ bind(deferred->map_check());  // Label for calculating code patching.
   1940   __ movq(kScratchRegister, factory()->the_hole_value(),
   1941           RelocInfo::EMBEDDED_OBJECT);
   1942   __ cmpq(map, kScratchRegister);  // Patched to cached map.
   1943   __ j(not_equal, &cache_miss);
   1944   // Patched to load either true or false.
   1945   __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
   1946 #ifdef DEBUG
   1947   // Check that the code size between patch label and patch sites is invariant.
   1948   Label end_of_patched_code;
   1949   __ bind(&end_of_patched_code);
   1950   ASSERT(true);
   1951 #endif
   1952   __ jmp(&done);
   1953 
   1954   // The inlined call site cache did not match. Check for null and string
   1955   // before calling the deferred code.
   1956   __ bind(&cache_miss);  // Null is not an instance of anything.
   1957   __ CompareRoot(object, Heap::kNullValueRootIndex);
   1958   __ j(equal, &false_result);
   1959 
   1960   // String values are not instances of anything.
   1961   __ JumpIfNotString(object, kScratchRegister, deferred->entry());
   1962 
   1963   __ bind(&false_result);
   1964   __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
   1965 
   1966   __ bind(deferred->exit());
   1967   __ bind(&done);
   1968 }
   1969 
   1970 
   1971 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
   1972                                                 Label* map_check) {
   1973   {
   1974     PushSafepointRegistersScope scope(this);
   1975     InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
   1976         InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
   1977     InstanceofStub stub(flags);
   1978 
   1979     __ push(ToRegister(instr->InputAt(0)));
   1980     __ Push(instr->function());
   1981 
   1982     Register temp = ToRegister(instr->TempAt(0));
   1983     static const int kAdditionalDelta = 10;
   1984     int delta =
   1985         masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
   1986     ASSERT(delta >= 0);
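             // The delta pushed here tells the stub how far the return address
             // is from the patched map check; kAdditionalDelta accounts for the
             // push and call instructions themselves, and the ASSERT after the
             // call verifies the total.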
   1987     __ push_imm32(delta);
   1988 
    1989     // We are pushing three values on the stack but recording a
    1990     // safepoint with two arguments because the stub is going to
    1991     // remove the third argument from the stack before jumping
    1992     // to the instanceof builtin on the slow path.
   1993     CallCodeGeneric(stub.GetCode(),
   1994                     RelocInfo::CODE_TARGET,
   1995                     instr,
   1996                     RECORD_SAFEPOINT_WITH_REGISTERS,
   1997                     2);
   1998     ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
   1999     // Move result to a register that survives the end of the
   2000     // PushSafepointRegisterScope.
   2001     __ movq(kScratchRegister, rax);
   2002   }
   2003   __ testq(kScratchRegister, kScratchRegister);
   2004   Label load_false;
   2005   Label done;
   2006   __ j(not_zero, &load_false);
   2007   __ LoadRoot(rax, Heap::kTrueValueRootIndex);
   2008   __ jmp(&done);
   2009   __ bind(&load_false);
   2010   __ LoadRoot(rax, Heap::kFalseValueRootIndex);
   2011   __ bind(&done);
   2012 }
   2013 
   2014 
   2015 void LCodeGen::DoCmpT(LCmpT* instr) {
   2016   Token::Value op = instr->op();
   2017 
   2018   Handle<Code> ic = CompareIC::GetUninitialized(op);
   2019   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   2020 
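           // As in DoCmpTAndBranch below, the compare stub computes GT and LTE
           // with the operands swapped, so reverse the condition for those
           // tokens.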
   2021   Condition condition = TokenToCondition(op, false);
   2022   if (op == Token::GT || op == Token::LTE) {
   2023     condition = ReverseCondition(condition);
   2024   }
   2025   NearLabel true_value, done;
   2026   __ testq(rax, rax);
   2027   __ j(condition, &true_value);
   2028   __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
   2029   __ jmp(&done);
   2030   __ bind(&true_value);
   2031   __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
   2032   __ bind(&done);
   2033 }
   2034 
   2035 
   2036 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
   2037   Token::Value op = instr->op();
   2038   int true_block = chunk_->LookupDestination(instr->true_block_id());
   2039   int false_block = chunk_->LookupDestination(instr->false_block_id());
   2040 
   2041   Handle<Code> ic = CompareIC::GetUninitialized(op);
   2042   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   2043 
    2044   // The compare stub expects the compare condition and the input operands
    2045   // to be reversed for GT and LTE.
   2046   Condition condition = TokenToCondition(op, false);
   2047   if (op == Token::GT || op == Token::LTE) {
   2048     condition = ReverseCondition(condition);
   2049   }
   2050   __ testq(rax, rax);
   2051   EmitBranch(true_block, false_block, condition);
   2052 }
   2053 
   2054 
   2055 void LCodeGen::DoReturn(LReturn* instr) {
   2056   if (FLAG_trace) {
   2057     // Preserve the return value on the stack and rely on the runtime
   2058     // call to return the value in the same register.
   2059     __ push(rax);
   2060     __ CallRuntime(Runtime::kTraceExit, 1);
   2061   }
   2062   __ movq(rsp, rbp);
   2063   __ pop(rbp);
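           // Return, dropping the receiver and the parameters from the stack;
           // rcx serves as a scratch register for Ret.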
   2064   __ Ret((ParameterCount() + 1) * kPointerSize, rcx);
   2065 }
   2066 
   2067 
   2068 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
   2069   Register result = ToRegister(instr->result());
   2070   if (result.is(rax)) {
   2071     __ load_rax(instr->hydrogen()->cell().location(),
   2072                 RelocInfo::GLOBAL_PROPERTY_CELL);
   2073   } else {
   2074     __ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
   2075     __ movq(result, Operand(result, 0));
   2076   }
   2077   if (instr->hydrogen()->check_hole_value()) {
   2078     __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
   2079     DeoptimizeIf(equal, instr->environment());
   2080   }
   2081 }
   2082 
   2083 
   2084 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   2085   ASSERT(ToRegister(instr->global_object()).is(rax));
   2086   ASSERT(ToRegister(instr->result()).is(rax));
   2087 
   2088   __ Move(rcx, instr->name());
   2089   RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
   2090                                                RelocInfo::CODE_TARGET_CONTEXT;
   2091   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   2092   CallCode(ic, mode, instr);
   2093 }
   2094 
   2095 
   2096 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
   2097   Register value = ToRegister(instr->InputAt(0));
   2098   Register temp = ToRegister(instr->TempAt(0));
   2099   ASSERT(!value.is(temp));
   2100   bool check_hole = instr->hydrogen()->check_hole_value();
   2101   if (!check_hole && value.is(rax)) {
   2102     __ store_rax(instr->hydrogen()->cell().location(),
   2103                  RelocInfo::GLOBAL_PROPERTY_CELL);
   2104     return;
   2105   }
   2106   // If the cell we are storing to contains the hole it could have
   2107   // been deleted from the property dictionary. In that case, we need
   2108   // to update the property details in the property dictionary to mark
   2109   // it as no longer deleted. We deoptimize in that case.
   2110   __ movq(temp, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
   2111   if (check_hole) {
   2112     __ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex);
   2113     DeoptimizeIf(equal, instr->environment());
   2114   }
   2115   __ movq(Operand(temp, 0), value);
   2116 }
   2117 
   2118 
   2119 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
   2120   ASSERT(ToRegister(instr->global_object()).is(rdx));
   2121   ASSERT(ToRegister(instr->value()).is(rax));
   2122 
   2123   __ Move(rcx, instr->name());
   2124   Handle<Code> ic = instr->strict_mode()
   2125       ? isolate()->builtins()->StoreIC_Initialize_Strict()
   2126       : isolate()->builtins()->StoreIC_Initialize();
   2127   CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
   2128 }
   2129 
   2130 
   2131 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
   2132   Register context = ToRegister(instr->context());
   2133   Register result = ToRegister(instr->result());
   2134   __ movq(result, ContextOperand(context, instr->slot_index()));
   2135 }
   2136 
   2137 
   2138 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
   2139   Register context = ToRegister(instr->context());
   2140   Register value = ToRegister(instr->value());
   2141   __ movq(ContextOperand(context, instr->slot_index()), value);
   2142   if (instr->needs_write_barrier()) {
   2143     int offset = Context::SlotOffset(instr->slot_index());
   2144     Register scratch = ToRegister(instr->TempAt(0));
   2145     __ RecordWrite(context, offset, value, scratch);
   2146   }
   2147 }
   2148 
   2149 
   2150 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
   2151   Register object = ToRegister(instr->InputAt(0));
   2152   Register result = ToRegister(instr->result());
   2153   if (instr->hydrogen()->is_in_object()) {
   2154     __ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
   2155   } else {
   2156     __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
   2157     __ movq(result, FieldOperand(result, instr->hydrogen()->offset()));
   2158   }
   2159 }
   2160 
   2161 
   2162 void LCodeGen::EmitLoadField(Register result,
   2163                              Register object,
   2164                              Handle<Map> type,
   2165                              Handle<String> name) {
   2166   LookupResult lookup;
   2167   type->LookupInDescriptors(NULL, *name, &lookup);
   2168   ASSERT(lookup.IsProperty() && lookup.type() == FIELD);
   2169   int index = lookup.GetLocalFieldIndexFromMap(*type);
   2170   int offset = index * kPointerSize;
   2171   if (index < 0) {
   2172     // Negative property indices are in-object properties, indexed
   2173     // from the end of the fixed part of the object.
   2174     __ movq(result, FieldOperand(object, offset + type->instance_size()));
   2175   } else {
   2176     // Non-negative property indices are in the properties array.
   2177     __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
   2178     __ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
   2179   }
   2180 }
   2181 
   2182 
   2183 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
   2184   Register object = ToRegister(instr->object());
   2185   Register result = ToRegister(instr->result());
   2186 
   2187   int map_count = instr->hydrogen()->types()->length();
   2188   Handle<String> name = instr->hydrogen()->name();
   2189 
   2190   if (map_count == 0) {
   2191     ASSERT(instr->hydrogen()->need_generic());
   2192     __ Move(rcx, instr->hydrogen()->name());
   2193     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   2194     CallCode(ic, RelocInfo::CODE_TARGET, instr);
   2195   } else {
   2196     NearLabel done;
   2197     for (int i = 0; i < map_count - 1; ++i) {
   2198       Handle<Map> map = instr->hydrogen()->types()->at(i);
   2199       NearLabel next;
   2200       __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
   2201       __ j(not_equal, &next);
   2202       EmitLoadField(result, object, map, name);
   2203       __ jmp(&done);
   2204       __ bind(&next);
   2205     }
   2206     Handle<Map> map = instr->hydrogen()->types()->last();
   2207     __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
   2208     if (instr->hydrogen()->need_generic()) {
   2209       NearLabel generic;
   2210       __ j(not_equal, &generic);
   2211       EmitLoadField(result, object, map, name);
   2212       __ jmp(&done);
   2213       __ bind(&generic);
   2214       __ Move(rcx, instr->hydrogen()->name());
   2215       Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   2216       CallCode(ic, RelocInfo::CODE_TARGET, instr);
   2217     } else {
   2218       DeoptimizeIf(not_equal, instr->environment());
   2219       EmitLoadField(result, object, map, name);
   2220     }
   2221     __ bind(&done);
   2222   }
   2223 }
   2224 
   2225 
   2226 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
   2227   ASSERT(ToRegister(instr->object()).is(rax));
   2228   ASSERT(ToRegister(instr->result()).is(rax));
   2229 
   2230   __ Move(rcx, instr->name());
   2231   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   2232   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   2233 }
   2234 
   2235 
   2236 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
   2237   Register function = ToRegister(instr->function());
   2238   Register result = ToRegister(instr->result());
   2239 
   2240   // Check that the function really is a function.
   2241   __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
   2242   DeoptimizeIf(not_equal, instr->environment());
   2243 
   2244   // Check whether the function has an instance prototype.
   2245   NearLabel non_instance;
   2246   __ testb(FieldOperand(result, Map::kBitFieldOffset),
   2247            Immediate(1 << Map::kHasNonInstancePrototype));
   2248   __ j(not_zero, &non_instance);
   2249 
   2250   // Get the prototype or initial map from the function.
   2251   __ movq(result,
    2252           FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   2253 
   2254   // Check that the function has a prototype or an initial map.
   2255   __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
   2256   DeoptimizeIf(equal, instr->environment());
   2257 
   2258   // If the function does not have an initial map, we're done.
   2259   NearLabel done;
   2260   __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
   2261   __ j(not_equal, &done);
   2262 
   2263   // Get the prototype from the initial map.
   2264   __ movq(result, FieldOperand(result, Map::kPrototypeOffset));
   2265   __ jmp(&done);
   2266 
   2267   // Non-instance prototype: Fetch prototype from constructor field
   2268   // in the function's map.
   2269   __ bind(&non_instance);
   2270   __ movq(result, FieldOperand(result, Map::kConstructorOffset));
   2271 
   2272   // All done.
   2273   __ bind(&done);
   2274 }
   2275 
   2276 
   2277 void LCodeGen::DoLoadElements(LLoadElements* instr) {
   2278   Register result = ToRegister(instr->result());
   2279   Register input = ToRegister(instr->InputAt(0));
   2280   __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
   2281   if (FLAG_debug_code) {
   2282     NearLabel done;
   2283     __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
   2284                    Heap::kFixedArrayMapRootIndex);
   2285     __ j(equal, &done);
   2286     __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
   2287                    Heap::kFixedCOWArrayMapRootIndex);
   2288     __ j(equal, &done);
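             // Neither fast nor COW elements: verify that the map's instance
             // type is in the external array range instead.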
   2289     Register temp((result.is(rax)) ? rbx : rax);
   2290     __ push(temp);
   2291     __ movq(temp, FieldOperand(result, HeapObject::kMapOffset));
   2292     __ movzxbq(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
   2293     __ subq(temp, Immediate(FIRST_EXTERNAL_ARRAY_TYPE));
   2294     __ cmpq(temp, Immediate(kExternalArrayTypeCount));
   2295     __ pop(temp);
   2296     __ Check(below, "Check for fast elements failed.");
   2297     __ bind(&done);
   2298   }
   2299 }
   2300 
   2301 
   2302 void LCodeGen::DoLoadExternalArrayPointer(
   2303     LLoadExternalArrayPointer* instr) {
   2304   Register result = ToRegister(instr->result());
   2305   Register input = ToRegister(instr->InputAt(0));
   2306   __ movq(result, FieldOperand(input,
   2307                                ExternalPixelArray::kExternalPointerOffset));
   2308 }
   2309 
   2310 
   2311 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
   2312   Register arguments = ToRegister(instr->arguments());
   2313   Register length = ToRegister(instr->length());
   2314   Register result = ToRegister(instr->result());
   2315 
   2316   if (instr->index()->IsRegister()) {
   2317     __ subl(length, ToRegister(instr->index()));
   2318   } else {
   2319     __ subl(length, ToOperand(instr->index()));
   2320   }
   2321   DeoptimizeIf(below_equal, instr->environment());
   2322 
   2323   // There are two words between the frame pointer and the last argument.
    2324   // Subtracting from length accounts for one of them; add one more.
   2325   __ movq(result, Operand(arguments, length, times_pointer_size, kPointerSize));
   2326 }
   2327 
   2328 
   2329 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
   2330   Register elements = ToRegister(instr->elements());
   2331   Register key = ToRegister(instr->key());
   2332   Register result = ToRegister(instr->result());
   2333   ASSERT(result.is(elements));
   2334 
   2335   // Load the result.
   2336   __ movq(result, FieldOperand(elements,
   2337                                key,
   2338                                times_pointer_size,
   2339                                FixedArray::kHeaderSize));
   2340 
   2341   // Check for the hole value.
   2342   __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
   2343   DeoptimizeIf(equal, instr->environment());
   2344 }
   2345 
   2346 
   2347 void LCodeGen::DoLoadKeyedSpecializedArrayElement(
   2348     LLoadKeyedSpecializedArrayElement* instr) {
   2349   Register external_pointer = ToRegister(instr->external_pointer());
   2350   Register key = ToRegister(instr->key());
   2351   ExternalArrayType array_type = instr->array_type();
   2352   if (array_type == kExternalFloatArray) {
   2353     XMMRegister result(ToDoubleRegister(instr->result()));
   2354     __ movss(result, Operand(external_pointer, key, times_4, 0));
   2355     __ cvtss2sd(result, result);
   2356   } else {
   2357     Register result(ToRegister(instr->result()));
   2358     switch (array_type) {
   2359       case kExternalByteArray:
   2360         __ movsxbq(result, Operand(external_pointer, key, times_1, 0));
   2361         break;
   2362       case kExternalUnsignedByteArray:
   2363       case kExternalPixelArray:
   2364         __ movzxbq(result, Operand(external_pointer, key, times_1, 0));
   2365         break;
   2366       case kExternalShortArray:
   2367         __ movsxwq(result, Operand(external_pointer, key, times_2, 0));
   2368         break;
   2369       case kExternalUnsignedShortArray:
   2370         __ movzxwq(result, Operand(external_pointer, key, times_2, 0));
   2371         break;
   2372       case kExternalIntArray:
   2373         __ movsxlq(result, Operand(external_pointer, key, times_4, 0));
   2374         break;
   2375       case kExternalUnsignedIntArray:
   2376         __ movl(result, Operand(external_pointer, key, times_4, 0));
   2377         __ testl(result, result);
   2378         // TODO(danno): we could be more clever here, perhaps having a special
   2379         // version of the stub that detects if the overflow case actually
    2380         // happens, and generating code that returns a double rather than an int.
   2381         DeoptimizeIf(negative, instr->environment());
   2382         break;
   2383       case kExternalFloatArray:
   2384         UNREACHABLE();
   2385         break;
   2386     }
   2387   }
   2388 }
   2389 
   2390 
   2391 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
   2392   ASSERT(ToRegister(instr->object()).is(rdx));
   2393   ASSERT(ToRegister(instr->key()).is(rax));
   2394 
   2395   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   2396   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   2397 }
   2398 
   2399 
   2400 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
   2401   Register result = ToRegister(instr->result());
   2402 
   2403   // Check for arguments adapter frame.
   2404   NearLabel done, adapted;
   2405   __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   2406   __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
   2407          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   2408   __ j(equal, &adapted);
   2409 
   2410   // No arguments adaptor frame.
   2411   __ movq(result, rbp);
   2412   __ jmp(&done);
   2413 
   2414   // Arguments adaptor frame present.
   2415   __ bind(&adapted);
   2416   __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   2417 
   2418   // Result is the frame pointer for the frame if not adapted and for the real
   2419   // frame below the adaptor frame if adapted.
   2420   __ bind(&done);
   2421 }
   2422 
   2423 
   2424 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
   2425   Register result = ToRegister(instr->result());
   2426 
   2427   NearLabel done;
   2428 
    2429   // If there is no arguments adaptor frame, the number of arguments is fixed.
   2430   if (instr->InputAt(0)->IsRegister()) {
   2431     __ cmpq(rbp, ToRegister(instr->InputAt(0)));
   2432   } else {
   2433     __ cmpq(rbp, ToOperand(instr->InputAt(0)));
   2434   }
   2435   __ movl(result, Immediate(scope()->num_parameters()));
   2436   __ j(equal, &done);
   2437 
   2438   // Arguments adaptor frame present. Get argument length from there.
   2439   __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   2440   __ SmiToInteger32(result,
   2441                     Operand(result,
   2442                             ArgumentsAdaptorFrameConstants::kLengthOffset));
   2443 
   2444   // Argument length is in result register.
   2445   __ bind(&done);
   2446 }
   2447 
   2448 
   2449 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
   2450   Register receiver = ToRegister(instr->receiver());
   2451   Register function = ToRegister(instr->function());
   2452   Register length = ToRegister(instr->length());
   2453   Register elements = ToRegister(instr->elements());
   2454   ASSERT(receiver.is(rax));  // Used for parameter count.
   2455   ASSERT(function.is(rdi));  // Required by InvokeFunction.
   2456   ASSERT(ToRegister(instr->result()).is(rax));
   2457 
   2458   // If the receiver is null or undefined, we have to pass the global object
   2459   // as a receiver.
   2460   NearLabel global_object, receiver_ok;
   2461   __ CompareRoot(receiver, Heap::kNullValueRootIndex);
   2462   __ j(equal, &global_object);
   2463   __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
   2464   __ j(equal, &global_object);
   2465 
   2466   // The receiver should be a JS object.
   2467   Condition is_smi = __ CheckSmi(receiver);
   2468   DeoptimizeIf(is_smi, instr->environment());
   2469   __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, kScratchRegister);
   2470   DeoptimizeIf(below, instr->environment());
   2471   __ jmp(&receiver_ok);
   2472 
   2473   __ bind(&global_object);
   2474   // TODO(kmillikin): We have a hydrogen value for the global object.  See
   2475   // if it's better to use it than to explicitly fetch it from the context
   2476   // here.
   2477   __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset));
   2478   __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
   2479   __ bind(&receiver_ok);
   2480 
   2481   // Copy the arguments to this function possibly from the
   2482   // adaptor frame below it.
   2483   const uint32_t kArgumentsLimit = 1 * KB;
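           // Deoptimize if more than 1024 arguments would have to be pushed,
           // guarding against overflowing the stack while copying them.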
   2484   __ cmpq(length, Immediate(kArgumentsLimit));
   2485   DeoptimizeIf(above, instr->environment());
   2486 
   2487   __ push(receiver);
   2488   __ movq(receiver, length);
   2489 
   2490   // Loop through the arguments pushing them onto the execution
   2491   // stack.
   2492   NearLabel invoke, loop;
   2493   // length is a small non-negative integer, due to the test above.
   2494   __ testl(length, length);
   2495   __ j(zero, &invoke);
   2496   __ bind(&loop);
   2497   __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
   2498   __ decl(length);
   2499   __ j(not_zero, &loop);
   2500 
   2501   // Invoke the function.
   2502   __ bind(&invoke);
   2503   ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
   2504   LPointerMap* pointers = instr->pointer_map();
   2505   LEnvironment* env = instr->deoptimization_environment();
   2506   RecordPosition(pointers->position());
   2507   RegisterEnvironmentForDeoptimization(env);
   2508   SafepointGenerator safepoint_generator(this,
   2509                                          pointers,
   2510                                          env->deoptimization_index());
   2511   v8::internal::ParameterCount actual(rax);
   2512   __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
   2513 }
   2514 
   2515 
   2516 void LCodeGen::DoPushArgument(LPushArgument* instr) {
   2517   LOperand* argument = instr->InputAt(0);
   2518   if (argument->IsConstantOperand()) {
   2519     EmitPushConstantOperand(argument);
   2520   } else if (argument->IsRegister()) {
   2521     __ push(ToRegister(argument));
   2522   } else {
   2523     ASSERT(!argument->IsDoubleRegister());
   2524     __ push(ToOperand(argument));
   2525   }
   2526 }
   2527 
   2528 
   2529 void LCodeGen::DoContext(LContext* instr) {
   2530   Register result = ToRegister(instr->result());
   2531   __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset));
   2532 }
   2533 
   2534 
   2535 void LCodeGen::DoOuterContext(LOuterContext* instr) {
   2536   Register context = ToRegister(instr->context());
   2537   Register result = ToRegister(instr->result());
   2538   __ movq(result,
   2539           Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
   2540   __ movq(result, FieldOperand(result, JSFunction::kContextOffset));
   2541 }
   2542 
   2543 
   2544 void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
   2545   Register result = ToRegister(instr->result());
   2546   __ movq(result, GlobalObjectOperand());
   2547 }
   2548 
   2549 
   2550 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
   2551   Register global = ToRegister(instr->global());
   2552   Register result = ToRegister(instr->result());
   2553   __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
   2554 }
   2555 
   2556 
   2557 void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
   2558                                  int arity,
   2559                                  LInstruction* instr) {
   2560   // Change context if needed.
   2561   bool change_context =
   2562       (info()->closure()->context() != function->context()) ||
   2563       scope()->contains_with() ||
   2564       (scope()->num_heap_slots() > 0);
   2565   if (change_context) {
   2566     __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
   2567   }
   2568 
    2569   // Set rax to arguments count if adaptation is not needed. Assumes that rax
   2570   // is available to write to at this point.
   2571   if (!function->NeedsArgumentsAdaption()) {
   2572     __ Set(rax, arity);
   2573   }
   2574 
   2575   LPointerMap* pointers = instr->pointer_map();
   2576   RecordPosition(pointers->position());
   2577 
   2578   // Invoke function.
   2579   if (*function == *info()->closure()) {
   2580     __ CallSelf();
   2581   } else {
   2582     __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   2583   }
   2584 
    2585   // Set up deoptimization.
   2586   RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0);
   2587 
   2588   // Restore context.
   2589   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   2590 }
   2591 
   2592 
   2593 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
   2594   ASSERT(ToRegister(instr->result()).is(rax));
   2595   __ Move(rdi, instr->function());
   2596   CallKnownFunction(instr->function(), instr->arity(), instr);
   2597 }
   2598 
   2599 
   2600 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
   2601   Register input_reg = ToRegister(instr->InputAt(0));
   2602   __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
   2603                  Heap::kHeapNumberMapRootIndex);
   2604   DeoptimizeIf(not_equal, instr->environment());
   2605 
   2606   Label done;
   2607   Register tmp = input_reg.is(rax) ? rcx : rax;
   2608   Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx;
   2609 
   2610   // Preserve the value of all registers.
   2611   PushSafepointRegistersScope scope(this);
   2612 
   2613   Label negative;
   2614   __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
   2615   // Check the sign of the argument. If the argument is positive, just
   2616   // return it. We do not need to patch the stack since |input| and
   2617   // |result| are the same register and |input| will be restored
   2618   // unchanged by popping safepoint registers.
   2619   __ testl(tmp, Immediate(HeapNumber::kSignMask));
   2620   __ j(not_zero, &negative);
   2621   __ jmp(&done);
   2622 
   2623   __ bind(&negative);
   2624 
   2625   Label allocated, slow;
   2626   __ AllocateHeapNumber(tmp, tmp2, &slow);
   2627   __ jmp(&allocated);
   2628 
   2629   // Slow case: Call the runtime system to do the number allocation.
   2630   __ bind(&slow);
   2631 
   2632   CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
   2633   // Set the pointer to the new heap number in tmp.
   2634   if (!tmp.is(rax)) {
   2635     __ movq(tmp, rax);
   2636   }
   2637 
   2638   // Restore input_reg after call to runtime.
   2639   __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
   2640 
   2641   __ bind(&allocated);
   2642   __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
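           // Clear the sign bit by shifting it out and back in, producing the
           // bit pattern of the absolute value.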
   2643   __ shl(tmp2, Immediate(1));
   2644   __ shr(tmp2, Immediate(1));
   2645   __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
   2646   __ StoreToSafepointRegisterSlot(input_reg, tmp);
   2647 
   2648   __ bind(&done);
   2649 }
   2650 
   2651 
   2652 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
   2653   Register input_reg = ToRegister(instr->InputAt(0));
   2654   __ testl(input_reg, input_reg);
   2655   Label is_positive;
   2656   __ j(not_sign, &is_positive);
   2657   __ negl(input_reg);  // Sets flags.
   2658   DeoptimizeIf(negative, instr->environment());
   2659   __ bind(&is_positive);
   2660 }
   2661 
   2662 
   2663 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
   2664   // Class for deferred case.
   2665   class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   2666    public:
   2667     DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
   2668                                     LUnaryMathOperation* instr)
   2669         : LDeferredCode(codegen), instr_(instr) { }
   2670     virtual void Generate() {
   2671       codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
   2672     }
   2673    private:
   2674     LUnaryMathOperation* instr_;
   2675   };
   2676 
   2677   ASSERT(instr->InputAt(0)->Equals(instr->result()));
   2678   Representation r = instr->hydrogen()->value()->representation();
   2679 
   2680   if (r.IsDouble()) {
   2681     XMMRegister scratch = xmm0;
   2682     XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
   2683     __ xorpd(scratch, scratch);
   2684     __ subsd(scratch, input_reg);
   2685     __ andpd(input_reg, scratch);
   2686   } else if (r.IsInteger32()) {
   2687     EmitIntegerMathAbs(instr);
   2688   } else {  // Tagged case.
   2689     DeferredMathAbsTaggedHeapNumber* deferred =
   2690         new DeferredMathAbsTaggedHeapNumber(this, instr);
   2691     Register input_reg = ToRegister(instr->InputAt(0));
   2692     // Smi check.
   2693     __ JumpIfNotSmi(input_reg, deferred->entry());
   2694     __ SmiToInteger32(input_reg, input_reg);
   2695     EmitIntegerMathAbs(instr);
   2696     __ Integer32ToSmi(input_reg, input_reg);
   2697     __ bind(deferred->exit());
   2698   }
   2699 }
   2700 
   2701 
   2702 void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
   2703   XMMRegister xmm_scratch = xmm0;
   2704   Register output_reg = ToRegister(instr->result());
   2705   XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
   2706   __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
   2707   __ ucomisd(input_reg, xmm_scratch);
   2708 
   2709   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
   2710     DeoptimizeIf(below_equal, instr->environment());
   2711   } else {
   2712     DeoptimizeIf(below, instr->environment());
   2713   }
   2714 
   2715   // Use truncating instruction (OK because input is positive).
   2716   __ cvttsd2si(output_reg, input_reg);
   2717 
   2718   // Overflow is signalled with minint.
   2719   __ cmpl(output_reg, Immediate(0x80000000));
   2720   DeoptimizeIf(equal, instr->environment());
   2721 }
   2722 
   2723 
   2724 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
   2725   const XMMRegister xmm_scratch = xmm0;
   2726   Register output_reg = ToRegister(instr->result());
   2727   XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
   2728 
   2729   // xmm_scratch = 0.5
   2730   __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
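           // 0x3FE0000000000000 is the IEEE 754 bit pattern of the double 0.5.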
   2731   __ movq(xmm_scratch, kScratchRegister);
   2732 
   2733   // input = input + 0.5
   2734   __ addsd(input_reg, xmm_scratch);
   2735 
    2736   // We need to return -0 for the input range [-0.5, 0), otherwise
   2737   // compute Math.floor(value + 0.5).
   2738   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
   2739     __ ucomisd(input_reg, xmm_scratch);
   2740     DeoptimizeIf(below_equal, instr->environment());
   2741   } else {
   2742     // If we don't need to bailout on -0, we check only bailout
   2743     // on negative inputs.
   2744     __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
   2745     __ ucomisd(input_reg, xmm_scratch);
   2746     DeoptimizeIf(below, instr->environment());
   2747   }
   2748 
   2749   // Compute Math.floor(value + 0.5).
   2750   // Use truncating instruction (OK because input is positive).
   2751   __ cvttsd2si(output_reg, input_reg);
   2752 
   2753   // Overflow is signalled with minint.
   2754   __ cmpl(output_reg, Immediate(0x80000000));
   2755   DeoptimizeIf(equal, instr->environment());
   2756 }
   2757 
   2758 
   2759 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
   2760   XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
   2761   ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
   2762   __ sqrtsd(input_reg, input_reg);
   2763 }
   2764 
   2765 
   2766 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
   2767   XMMRegister xmm_scratch = xmm0;
   2768   XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
   2769   ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
   2770   __ xorpd(xmm_scratch, xmm_scratch);
   2771   __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
   2772   __ sqrtsd(input_reg, input_reg);
   2773 }
   2774 


void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  XMMRegister left_reg = ToDoubleRegister(left);
  ASSERT(!left_reg.is(xmm1));
  LOperand* right = instr->InputAt(1);
  XMMRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();
  if (exponent_type.IsDouble()) {
    __ PrepareCallCFunction(2);
    // Move the arguments to the correct registers.
    __ movsd(xmm0, left_reg);
    ASSERT(ToDoubleRegister(right).is(xmm1));
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 2);
  } else if (exponent_type.IsInteger32()) {
    __ PrepareCallCFunction(2);
    // Move the arguments to the correct registers: xmm0 and edi (not rdi).
    // On Windows, the registers are xmm0 and edx.
    __ movsd(xmm0, left_reg);
#ifdef _WIN64
    ASSERT(ToRegister(right).is(rdx));
#else
    ASSERT(ToRegister(right).is(rdi));
#endif
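    // (The difference follows the C ABIs: the System V AMD64 ABI assigns
    // integer and floating-point arguments to independent register
    // sequences, so the int exponent takes the first integer register,
    // edi. The Win64 ABI assigns registers by argument position, so the
    // second argument lands in the second slot, rdx/edx.)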
    __ CallCFunction(
        ExternalReference::power_double_int_function(isolate()), 2);
  } else {
    ASSERT(exponent_type.IsTagged());
    Register right_reg = ToRegister(right);

    Label non_smi, call;
    __ JumpIfNotSmi(right_reg, &non_smi);
    __ SmiToInteger32(right_reg, right_reg);
    __ cvtlsi2sd(xmm1, right_reg);
    __ jmp(&call);

    __ bind(&non_smi);
    __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, kScratchRegister);
    DeoptimizeIf(not_equal, instr->environment());
    __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset));

    __ bind(&call);
    __ PrepareCallCFunction(2);
    // Move the arguments to the correct registers: xmm0 and xmm1.
    __ movsd(xmm0, left_reg);
    // Right argument is already in xmm1.
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 2);
  }
  // Return value is in xmm0.
  __ movsd(result_reg, xmm0);
  // Restore the context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->key()).is(rcx));
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
      arity, NOT_IN_LOOP);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
      arity, NOT_IN_LOOP);
  __ Move(rcx, instr->name());
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ Drop(1);
}


void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  int arity = instr->arity();
  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
      arity, NOT_IN_LOOP);
  __ Move(rcx, instr->name());
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  __ Move(rdi, instr->target());
  CallKnownFunction(instr->target(), instr->arity(), instr);
}


void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
  ASSERT(ToRegister(instr->result()).is(rax));

  Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
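  // rax is expected to carry the argument count into the construct stub;
  // this is part of V8's internal calling convention for construct calls.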
  __ Set(rax, instr->arity());
  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
}


void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}


void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  int offset = instr->offset();

  if (!instr->transition().is_null()) {
    __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ movq(FieldOperand(object, offset), value);
    if (instr->needs_write_barrier()) {
      Register temp = ToRegister(instr->TempAt(0));
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, offset, value, temp);
    }
  } else {
    Register temp = ToRegister(instr->TempAt(0));
    __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset));
    __ movq(FieldOperand(temp, offset), value);
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(temp, offset, value, object);
    }
  }
}


void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rdx));
  ASSERT(ToRegister(instr->value()).is(rax));

  __ Move(rcx, instr->hydrogen()->name());
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = ToRegister(instr->key());
  ExternalArrayType array_type = instr->array_type();
  if (array_type == kExternalFloatArray) {
    XMMRegister value(ToDoubleRegister(instr->value()));
    __ cvtsd2ss(value, value);
    __ movss(Operand(external_pointer, key, times_4, 0), value);
  } else {
    Register value(ToRegister(instr->value()));
    switch (array_type) {
      case kExternalPixelArray:
        {  // Clamp the value to [0..255].
          NearLabel done;
          __ testl(value, Immediate(0xFFFFFF00));
          __ j(zero, &done);
          __ setcc(negative, value);  // 1 if negative, 0 if positive.
          __ decb(value);  // 0 if negative, 255 if positive.
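          // (Worked example: value == -5 sets the sign flag, so setcc
          // yields 1 and decb leaves 0; value == 300 clears it, so setcc
          // yields 0 and decb wraps to 255. In-range values never reach
          // this path thanks to the j(zero, &done) above.)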
          __ bind(&done);
          __ movb(Operand(external_pointer, key, times_1, 0), value);
        }
        break;
      case kExternalByteArray:
      case kExternalUnsignedByteArray:
        __ movb(Operand(external_pointer, key, times_1, 0), value);
        break;
      case kExternalShortArray:
      case kExternalUnsignedShortArray:
        __ movw(Operand(external_pointer, key, times_2, 0), value);
        break;
      case kExternalIntArray:
      case kExternalUnsignedIntArray:
        __ movl(Operand(external_pointer, key, times_4, 0), value);
        break;
      case kExternalFloatArray:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  if (instr->length()->IsRegister()) {
    __ cmpq(ToRegister(instr->index()), ToRegister(instr->length()));
  } else {
    __ cmpq(ToRegister(instr->index()), ToOperand(instr->length()));
  }
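  // above_equal is an unsigned condition, so a negative index compares as
  // a huge unsigned value; a single branch covers both "index >= length"
  // and "index < 0".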
  DeoptimizeIf(above_equal, instr->environment());
}


void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ movq(FieldOperand(elements, offset), value);
  } else {
    __ movq(FieldOperand(elements,
                         key,
                         times_pointer_size,
                         FixedArray::kHeaderSize),
            value);
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute the address of the modified element and store it into the
    // key register.
    __ lea(key, FieldOperand(elements,
                             key,
                             times_pointer_size,
                             FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}


void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rdx));
  ASSERT(ToRegister(instr->key()).is(rcx));
  ASSERT(ToRegister(instr->value()).is(rax));

  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
      : isolate()->builtins()->KeyedStoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register string = ToRegister(instr->string());
  Register index = no_reg;
  int const_index = -1;
  if (instr->index()->IsConstantOperand()) {
    const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
    if (!Smi::IsValid(const_index)) {
      // Guaranteed to be out of bounds because of the STATIC_ASSERT above,
      // so the bounds check that dominates this instruction must have
      // deoptimized already.
      if (FLAG_debug_code) {
        __ Abort("StringCharCodeAt: out of bounds index.");
      }
      // No code needs to be generated.
      return;
    }
  } else {
    index = ToRegister(instr->index());
  }
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  NearLabel flat_string, ascii_string, done;

  // Fetch the instance type of the receiver into the result register.
  __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for non-sequential strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle cons strings and go to deferred code for the rest.
  __ testb(result, Immediate(kIsConsStringMask));
  __ j(zero, deferred->entry());

  // ConsString.
  // Check whether the right-hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case, we would rather go to the runtime system now to flatten
  // the string.
  __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
                 Heap::kEmptyStringRootIndex);
  __ j(not_equal, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ movq(string, FieldOperand(string, ConsString::kFirstOffset));
  __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result, Immediate(kStringRepresentationMask));
  __ j(not_zero, deferred->entry());

  // Check for ASCII or two-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ testb(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  if (instr->index()->IsConstantOperand()) {
    __ movzxwl(result,
               FieldOperand(string,
                            SeqTwoByteString::kHeaderSize +
                            (kUC16Size * const_index)));
  } else {
    __ movzxwl(result, FieldOperand(string,
                                    index,
                                    times_2,
                                    SeqTwoByteString::kHeaderSize));
  }
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  if (instr->index()->IsConstantOperand()) {
    __ movzxbl(result, FieldOperand(string,
                                    SeqAsciiString::kHeaderSize + const_index));
  } else {
    __ movzxbl(result, FieldOperand(string,
                                    index,
                                    times_1,
                                    SeqAsciiString::kHeaderSize));
  }
  __ bind(&done);
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, 0);

  PushSafepointRegistersScope scope(this);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ Push(Smi::FromInt(const_index));
  } else {
    Register index = ToRegister(instr->index());
    __ Integer32ToSmi(index, index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(rax);
  }
  __ SmiToInteger32(rax, rax);
  __ StoreToSafepointRegisterSlot(result, rax);
}


void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  __ cmpl(char_code, Immediate(String::kMaxAsciiCharCode));
  __ j(above, deferred->entry());
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ movq(result, FieldOperand(result,
                               char_code, times_pointer_size,
                               FixedArray::kHeaderSize));
  __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
  __ j(equal, deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, 0);

  PushSafepointRegistersScope scope(this);
  __ Integer32ToSmi(char_code, char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(result, rax);
}


void LCodeGen::DoStringLength(LStringLength* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  __ movq(result, FieldOperand(string, String::kLengthOffset));
}


void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() || input->IsStackSlot());
  LOperand* output = instr->result();
  ASSERT(output->IsDoubleRegister());
  if (input->IsRegister()) {
    __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
  } else {
    __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
  }
}


void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

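  // On x64, Integer32ToSmi shifts the 32-bit value into the upper half of
  // the word (kSmiShift is 32), so tagging a 32-bit integer can never
  // overflow; this is why, unlike on ia32, no overflow check is needed.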
  __ Integer32ToSmi(reg, reg);
}


void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
   private:
    LNumberTagD* instr_;
  };

  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register reg = ToRegister(instr->result());
  Register tmp = ToRegister(instr->TempAt(0));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
}


void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ Move(reg, Smi::FromInt(0));

  {
    PushSafepointRegistersScope scope(this);
    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Ensure that the value in rax survives popping registers.
    __ movq(kScratchRegister, rax);
  }
  __ movq(reg, kScratchRegister);
}


void LCodeGen::DoSmiTag(LSmiTag* instr) {
  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Register input = ToRegister(instr->InputAt(0));
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
  __ Integer32ToSmi(input, input);
}


void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Register input = ToRegister(instr->InputAt(0));
  if (instr->needs_check()) {
    Condition is_smi = __ CheckSmi(input);
    DeoptimizeIf(NegateCondition(is_smi), instr->environment());
  }
  __ SmiToInteger32(input, input);
}


void LCodeGen::EmitNumberUntagD(Register input_reg,
                                XMMRegister result_reg,
                                bool deoptimize_on_undefined,
                                LEnvironment* env) {
  NearLabel load_smi, done;

  // Smi check.
  __ JumpIfSmi(input_reg, &load_smi);

  // Heap number map check.
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  if (deoptimize_on_undefined) {
    DeoptimizeIf(not_equal, env);
  } else {
    NearLabel heap_number;
    __ j(equal, &heap_number);
    __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
    DeoptimizeIf(not_equal, env);

    // Convert undefined to NaN. Compute NaN as 0/0.
    __ xorpd(result_reg, result_reg);
    __ divsd(result_reg, result_reg);
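    // (0.0 / 0.0 is the standard SSE2 way to materialize a quiet NaN
    // without loading a constant: IEEE 754 defines the result of 0/0 as
    // NaN, and the invalid-operation flag raised here is simply ignored.)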
    __ jmp(&done);

    __ bind(&heap_number);
  }
  // Heap number to XMM conversion.
  __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ jmp(&done);

  // Smi to XMM conversion.
  __ bind(&load_smi);
  __ SmiToInteger32(kScratchRegister, input_reg);
  __ cvtlsi2sd(result_reg, kScratchRegister);
  __ bind(&done);
}


class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};


void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  NearLabel done, heap_number;
  Register input_reg = ToRegister(instr->InputAt(0));

  // Heap number map check.
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);

  if (instr->truncating()) {
    __ j(equal, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
    DeoptimizeIf(not_equal, instr->environment());
    __ Set(input_reg, 0);
    __ jmp(&done);

    __ bind(&heap_number);

    __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2siq(input_reg, xmm0);
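    // (The 64-bit conversion is deliberate: JS truncating conversions take
    // the value mod 2^32, and for any double whose integral part fits in
    // 64 bits the low 32 bits of the int64 result are exactly that. Only
    // NaN, infinities, and values outside the int64 range produce the
    // 0x8000000000000000 sentinel checked below.)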
    __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
    __ cmpq(input_reg, kScratchRegister);
    DeoptimizeIf(equal, instr->environment());
  } else {
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(not_equal, instr->environment());

    XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
    __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2si(input_reg, xmm0);
    __ cvtlsi2sd(xmm_temp, input_reg);
    __ ucomisd(xmm0, xmm_temp);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ testl(input_reg, input_reg);
      __ j(not_zero, &done);
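      // (movmskpd copies the sign bits of the packed doubles into the low
      // bits of the destination; bit 0 is the sign of the value in xmm0.
      // A converted result of 0 whose source has the sign bit set means
      // the input was -0, which cannot be represented as an int32.)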
      __ movmskpd(input_reg, xmm0);
      __ andl(input_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr->environment());
    }
  }
  __ bind(&done);
}


void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);
  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
  __ JumpIfNotSmi(input_reg, deferred->entry());
  __ SmiToInteger32(input_reg, input_reg);
  __ bind(deferred->exit());
}


void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  XMMRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg,
                   instr->hydrogen()->deoptimize_on_undefined(),
                   instr->environment());
}


void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsDoubleRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsRegister());

  XMMRegister input_reg = ToDoubleRegister(input);
  Register result_reg = ToRegister(result);

  if (instr->truncating()) {
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    __ cvttsd2siq(result_reg, input_reg);
    __ movq(kScratchRegister, V8_INT64_C(0x8000000000000000), RelocInfo::NONE);
    __ cmpq(result_reg, kScratchRegister);
    DeoptimizeIf(equal, instr->environment());
  } else {
    __ cvttsd2si(result_reg, input_reg);
    __ cvtlsi2sd(xmm0, result_reg);
    __ ucomisd(xmm0, input_reg);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      NearLabel done;
      // The integer converted back is equal to the original. We
      // only have to test if we got -0 as an input.
      __ testl(result_reg, result_reg);
      __ j(not_zero, &done);
      __ movmskpd(result_reg, input_reg);
      // Bit 0 contains the sign of the double in input_reg.
      // If the input was positive, we are ok and return 0, otherwise
      // deoptimize.
      __ andl(result_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr->environment());
      __ bind(&done);
    }
  }
}


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  Condition cc = masm()->CheckSmi(ToRegister(input));
  DeoptimizeIf(NegateCondition(cc), instr->environment());
}


void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  Condition cc = masm()->CheckSmi(ToRegister(input));
  DeoptimizeIf(cc, instr->environment());
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  InstanceType first = instr->hydrogen()->first();
  InstanceType last = instr->hydrogen()->last();

  __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));

  // If there is only one type in the interval, check for equality.
  if (first == last) {
    __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
            Immediate(static_cast<int8_t>(first)));
    DeoptimizeIf(not_equal, instr->environment());
  } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
    // String has a dedicated bit in the instance type.
    __ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
             Immediate(kIsNotStringMask));
    DeoptimizeIf(not_zero, instr->environment());
  } else {
    __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
            Immediate(static_cast<int8_t>(first)));
    DeoptimizeIf(below, instr->environment());
    // Omit the check for the last type.
    if (last != LAST_TYPE) {
      __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
              Immediate(static_cast<int8_t>(last)));
      DeoptimizeIf(above, instr->environment());
    }
  }
}


void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  ASSERT(instr->InputAt(0)->IsRegister());
  Register reg = ToRegister(instr->InputAt(0));
  __ Cmp(reg, instr->hydrogen()->target());
  DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::DoCheckMap(LCheckMap* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
         instr->hydrogen()->map());
  DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
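  // (Objects in new space can be moved by the GC, so instead of embedding
  // the pointer directly the code embeds a global property cell, which
  // lives in old space, and loads the object through it.)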
  if (heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        factory()->NewJSGlobalPropertyCell(object);
    __ movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    __ movq(result, Operand(result, 0));
  } else {
    __ Move(result, object);
  }
}


void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register reg = ToRegister(instr->TempAt(0));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(reg, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
           Handle<Map>(current_prototype->map()));
    DeoptimizeIf(not_equal, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(reg, current_prototype);
  }

  // Check the holder map.
  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
         Handle<Map>(current_prototype->map()));
  DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  // Set up the parameters to the stub/runtime call.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->constant_elements());

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}


void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  // Set up the parameters to the stub/runtime call.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->constant_properties());
  __ Push(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0));

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}


void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(rax));
  __ push(rax);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}


void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  NearLabel materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ movq(rbx, FieldOperand(rcx, literal_offset));
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized);

  // Create the regexp literal using a runtime function.
  // The result will be in rax.
  __ push(rcx);
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->pattern());
  __ Push(instr->hydrogen()->flags());
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ movq(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll the copy loop once for better throughput.)
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movq(rdx, FieldOperand(rbx, i));
    __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movq(FieldOperand(rax, i), rdx);
    __ movq(FieldOperand(rax, i + kPointerSize), rcx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movq(FieldOperand(rax, size - kPointerSize), rdx);
  }
}


void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ Push(shared_info);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ push(rsi);
    __ Push(shared_info);
    __ PushRoot(pretenure ?
                Heap::kTrueValueRootIndex :
                Heap::kFalseValueRootIndex);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}


void LCodeGen::DoTypeof(LTypeof* instr) {
  LOperand* input = instr->InputAt(0);
  if (input->IsConstantOperand()) {
    __ Push(ToHandle(LConstantOperand::cast(input)));
  } else if (input->IsRegister()) {
    __ push(ToRegister(input));
  } else {
    ASSERT(input->IsStackSlot());
    __ push(ToOperand(input));
  }
  CallRuntime(Runtime::kTypeof, 1, instr);
}


void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  NearLabel done;

  Condition final_branch_condition = EmitTypeofIs(&true_label,
                                                  &false_label,
                                                  input,
                                                  instr->type_literal());
  __ j(final_branch_condition, &true_label);
  __ bind(&false_label);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ jmp(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::EmitPushConstantOperand(LOperand* operand) {
  ASSERT(operand->IsConstantOperand());
  LConstantOperand* const_op = LConstantOperand::cast(operand);
  Handle<Object> literal = chunk_->LookupLiteral(const_op);
  Representation r = chunk_->LookupLiteralRepresentation(const_op);
  if (r.IsInteger32()) {
    ASSERT(literal->IsNumber());
    __ push(Immediate(static_cast<int32_t>(literal->Number())));
  } else if (r.IsDouble()) {
    Abort("unsupported double immediate");
  } else {
    ASSERT(r.IsTagged());
    __ Push(literal);
  }
}


void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}


Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  if (type_name->Equals(heap()->number_symbol())) {
    __ JumpIfSmi(input, true_label);
    __ CompareRoot(FieldOperand(input, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->string_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
    __ j(above_equal, false_label);
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = zero;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ j(equal, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ j(equal, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ movq(input, FieldOperand(input, HeapObject::kMapOffset));
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = not_zero;

  } else if (type_name->Equals(heap()->function_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, FIRST_FUNCTION_CLASS_TYPE, input);
    final_branch_condition = above_equal;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    __ j(equal, true_label);
    __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, input);
    __ j(below, false_label);
    __ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
    __ j(above_equal, false_label);
    // Check for undetectable objects => false.
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = zero;

  } else {
    final_branch_condition = never;
    __ jmp(false_label);
  }

  return final_branch_condition;
}


void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
  Register result = ToRegister(instr->result());
  NearLabel true_label;
  NearLabel done;

  EmitIsConstructCall(result);
  __ j(equal, &true_label);

  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ jmp(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp);
  EmitBranch(true_block, false_block, equal);
}


void LCodeGen::EmitIsConstructCall(Register temp) {
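  // (Sets only the condition flags: after the final Cmp below, "equal"
  // means the calling frame is a construct frame. The temp register is
  // clobbered in the process.)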
  // Get the frame pointer for the calling frame.
  __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  NearLabel check_frame_marker;
  __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &check_frame_marker);
  __ movq(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Smi::FromInt(StackFrame::CONSTRUCT));
}


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture the environment
  // after a call for populating the safepoint data with deoptimization data.
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}


void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  // Push object.
  if (obj->IsRegister()) {
    __ push(ToRegister(obj));
  } else {
    __ push(ToOperand(obj));
  }
  // Push key.
  if (key->IsConstantOperand()) {
    EmitPushConstantOperand(key);
  } else if (key->IsRegister()) {
    __ push(ToRegister(key));
  } else {
    __ push(ToOperand(key));
  }
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  // Create a safepoint generator that will also ensure enough space in the
  // reloc info for patching in deoptimization (since this is invoking a
  // builtin).
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ Push(Smi::FromInt(strict_mode_flag()));
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
}


void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  NearLabel done;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &done);

  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&done);
}


void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64