// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "x64/lithium-codegen-x64.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// When invoking builtins, we need to record the safepoint in the middle of
// the invoke instruction sequence generated by the macro assembler.
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
      : codegen_(codegen),
        pointers_(pointers),
        deopt_mode_(mode) { }
  virtual ~SafepointGenerator() { }

  virtual void BeforeCall(int call_size) const {
    codegen_->EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - call_size);
  }

  virtual void AfterCall() const {
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  Safepoint::DeoptMode deopt_mode_;
};
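
// A rough usage sketch (illustration only, not part of this file's logic):
// call sites construct a SafepointGenerator and hand it to the macro
// assembler as the CallWrapper for an invoke sequence, e.g.
//
//   SafepointGenerator generator(this, instr->pointer_map(),
//                                Safepoint::kLazyDeopt);
//   ParameterCount count(instr->arity());
//   __ InvokeFunction(function, count, CALL_FUNCTION, generator,
//                     CALL_AS_METHOD);
//
// BeforeCall() pads the code so the call site can later be patched for
// lazy deoptimization, and AfterCall() records the safepoint at the
// return address.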


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Z_Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in GeneratePrologue).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateJumpTable() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack-allocated buffer goes out of scope.
  int length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions need to replace the receiver with undefined
  // when called as functions (without an explicit receiver
  // object). rcx is zero for method calls and non-zero for function
  // calls.
  if (!info_->is_classic_mode() || info_->is_native()) {
    Label ok;
    __ testq(rcx, rcx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ movq(Operand(rsp, receiver_offset), kScratchRegister);
    __ bind(&ok);
  }
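  // Illustration (hypothetical numbers): with two declared parameters the
  // stack at this point holds, from rsp upwards, the return address, the
  // two parameters, and then the receiver, so
  // receiver_offset = (2 + 1) * kPointerSize = 24 bytes.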

  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ Set(rax, slots);
      __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
      Label loop;
      __ bind(&loop);
      __ push(kScratchRegister);
      __ decl(rax);
      __ j(not_zero, &loop);
    } else {
      __ subq(rsp, Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On Windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the whole allocated area
      // accessible at arbitrary offsets, we touch each page in turn (the
      // value written is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ movq(Operand(rsp, offset), rax);
      }
#endif
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoLazyDeopt);
    // Context is returned in both rax and rsi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in rsi.
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier. This clobbers rax and rbx.
        __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  return !is_aborted();
}


bool LCodeGen::GenerateJumpTable() {
  for (int i = 0; i < jump_table_.length(); i++) {
    __ bind(&jump_table_[i].label);
    __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      Comment(";;; Deferred code @%d: %s.",
              code->instruction_index(),
              code->instr()->Mnemonic());
      code->Generate();
      __ jmp(code->exit());
    }
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
      chunk_->LookupLiteralRepresentation(op).IsInteger32();
}


bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
      chunk_->LookupLiteralRepresentation(op).IsTagged();
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


double LCodeGen::ToDouble(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  return value->Number();
}


Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  Handle<Object> literal = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
  return literal;
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  // Does not handle registers. In X64 assembler, plain registers are not
  // representable as an Operand.
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return Operand(rbp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(rbp, -(index - 1) * kPointerSize);
  }
}
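
// Illustration (hypothetical indices): index 0, the first spill slot,
// yields Operand(rbp, -3 * kPointerSize), below the context and function
// slots; index -1, an incoming parameter, yields
// Operand(rbp, +2 * kPointerSize), just above the return address.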


void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
    default:
      UNREACHABLE();
  }
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this
    // value is not present and must be reconstructed by the deoptimizer.
    // Currently this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode,
                               int argc) {
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - masm()->CallSize(code));
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0);
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                                    Safepoint::DeoptMode mode) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    int jsframe_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
      if (e->frame_type() == JS_FUNCTION) {
        ++jsframe_count;
      }
    }
    Translation translation(&translations_, frame_count, jsframe_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    int pc_offset = masm()->pc_offset();
    environment->Register(deoptimization_index,
                          translation.index(),
                          (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment);
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  if (cc == no_condition) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    // We often have several deopts to the same entry; reuse the last
    // jump table entry if this is the case.
    if (jump_table_.is_empty() ||
        jump_table_.last().address != entry) {
      jump_table_.Add(JumpTableEntry(entry));
    }
    __ j(cc, &jump_table_.last().label);
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
    data->SetPc(i, Smi::FromInt(env->pc_offset()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode, int argc) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), argc, Safepoint::kLazyDeopt);
  }
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  ASSERT(kind == expected_safepoint_kind_);

  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();

  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register rsi always contains a pointer to the context.
    safepoint.DefinePointerRegister(rsi);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}


void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deopt_mode);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
}


void LCodeGen::RecordPosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }
}


void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    if (divisor < 0) divisor = -divisor;
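
    // Worked example (illustration only): for divisor 4 the mask is 3.
    // A negative dividend is negated, masked, and negated back so the
    // result keeps the dividend's sign, matching JS semantics:
    //   -5 % 4:  neg -> 5,  and 3 -> 1,  neg -> -1.
    // A non-negative dividend is simply masked:  6 & 3 == 2.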

    Label positive_dividend, done;
    __ testl(dividend, dividend);
    __ j(not_sign, &positive_dividend, Label::kNear);
    __ negl(dividend);
    __ andl(dividend, Immediate(divisor - 1));
    __ negl(dividend);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ j(not_zero, &done, Label::kNear);
      DeoptimizeIf(no_condition, instr->environment());
    } else {
      __ jmp(&done, Label::kNear);
    }
    __ bind(&positive_dividend);
    __ andl(dividend, Immediate(divisor - 1));
    __ bind(&done);
  } else {
    Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
    Register left_reg = ToRegister(instr->InputAt(0));
    Register right_reg = ToRegister(instr->InputAt(1));
    Register result_reg = ToRegister(instr->result());

    ASSERT(left_reg.is(rax));
    ASSERT(result_reg.is(rdx));
    ASSERT(!right_reg.is(rax));
    ASSERT(!right_reg.is(rdx));

    // Check for x % 0.
    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
      __ testl(right_reg, right_reg);
      DeoptimizeIf(zero, instr->environment());
    }

    __ testl(left_reg, left_reg);
    __ j(zero, &remainder_eq_dividend, Label::kNear);
    __ j(sign, &slow, Label::kNear);

    __ testl(right_reg, right_reg);
    __ j(not_sign, &both_positive, Label::kNear);
    // The sign of the divisor doesn't matter.
    __ neg(right_reg);

    __ bind(&both_positive);
    // If the dividend is smaller than the nonnegative
    // divisor, the dividend is the result.
    __ cmpl(left_reg, right_reg);
    __ j(less, &remainder_eq_dividend, Label::kNear);
    // Check if the divisor is a power-of-two integer.
    Register scratch = ToRegister(instr->TempAt(0));
    __ movl(scratch, right_reg);
    __ subl(scratch, Immediate(1));
    __ testl(scratch, right_reg);
    __ j(not_zero, &do_subtraction, Label::kNear);
    __ andl(left_reg, scratch);
    __ jmp(&remainder_eq_dividend, Label::kNear);

    __ bind(&do_subtraction);
    const int kUnfolds = 3;
    // Try a few subtractions of the dividend.
    __ movl(scratch, left_reg);
    for (int i = 0; i < kUnfolds; i++) {
      // Reduce the dividend by the divisor.
      __ subl(left_reg, right_reg);
      // Check if the dividend is less than the divisor.
      __ cmpl(left_reg, right_reg);
      __ j(less, &remainder_eq_dividend, Label::kNear);
    }
    __ movl(left_reg, scratch);

    // Slow case, using the idiv instruction.
    __ bind(&slow);
    // Sign extend eax to edx.
    // (We are using only the low 32 bits of the values.)
    __ cdq();

    // Check for (0 % -x) that will produce negative zero.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      Label positive_left;
      Label done;
      __ testl(left_reg, left_reg);
      __ j(not_sign, &positive_left, Label::kNear);
      __ idivl(right_reg);

      // Test the remainder for 0, because then the result would be -0.
      __ testl(result_reg, result_reg);
      __ j(not_zero, &done, Label::kNear);

      DeoptimizeIf(no_condition, instr->environment());
      __ bind(&positive_left);
      __ idivl(right_reg);
      __ bind(&done);
    } else {
      __ idivl(right_reg);
    }
    __ jmp(&done, Label::kNear);

    __ bind(&remainder_eq_dividend);
    __ movl(result_reg, left_reg);

    __ bind(&done);
  }
}


void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(rax));
  ASSERT(ToRegister(instr->InputAt(0)).is(rax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));

  Register left_reg = rax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ testl(right_reg, right_reg);
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ testl(left_reg, left_reg);
    __ j(not_zero, &left_not_zero, Label::kNear);
    __ testl(right_reg, right_reg);
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (kMinInt / -1), which overflows int32.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmpl(left_reg, Immediate(kMinInt));
    __ j(not_zero, &left_not_min_int, Label::kNear);
    __ cmpl(right_reg, Immediate(-1));
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to rdx.
  __ cdq();
  __ idivl(right_reg);

  // Deoptimize if the remainder is not 0.
  __ testl(rdx, rdx);
  DeoptimizeIf(not_zero, instr->environment());
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ movl(kScratchRegister, left);
  }

  bool can_overflow =
      instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  if (right->IsConstantOperand()) {
    int right_value = ToInteger32(LConstantOperand::cast(right));
    if (right_value == -1) {
      __ negl(left);
    } else if (right_value == 0) {
      __ xorl(left, left);
    } else if (right_value == 2) {
      __ addl(left, left);
    } else if (!can_overflow) {
      // If the multiplication is known to not overflow, we
      // can use operations that don't set the overflow flag
      // correctly.
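      // For illustration: lea computes base + index * scale without
      // touching the flags, so x * 3 becomes leal(left, left, times_2),
      // x * 5 uses times_4, and x * 9 uses times_8; powers of two use
      // plain shifts. None of these set the overflow flag the way imull
      // does, which is why they are only safe under !can_overflow.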
      switch (right_value) {
        case 1:
          // Do nothing.
          break;
        case 3:
          __ leal(left, Operand(left, left, times_2, 0));
          break;
        case 4:
          __ shll(left, Immediate(2));
          break;
        case 5:
          __ leal(left, Operand(left, left, times_4, 0));
          break;
        case 8:
          __ shll(left, Immediate(3));
          break;
        case 9:
          __ leal(left, Operand(left, left, times_8, 0));
          break;
        case 16:
          __ shll(left, Immediate(4));
          break;
        default:
          __ imull(left, left, Immediate(right_value));
          break;
      }
    } else {
      __ imull(left, left, Immediate(right_value));
    }
  } else if (right->IsStackSlot()) {
    __ imull(left, ToOperand(right));
  } else {
    __ imull(left, ToRegister(right));
  }

  if (can_overflow) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ testl(left, left);
    __ j(not_zero, &done, Label::kNear);
    if (right->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else if (right->IsStackSlot()) {
      __ orl(kScratchRegister, ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    } else {
      // Test the non-zero operand for negative sign.
      __ orl(kScratchRegister, ToRegister(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), Immediate(right_operand));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else if (right->IsStackSlot()) {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    ASSERT(right->IsRegister());
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), ToRegister(right));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), ToRegister(right));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), ToRegister(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(rcx));

    switch (instr->op()) {
      case Token::SAR:
        __ sarl_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shrl_cl(ToRegister(left));
        if (instr->can_deopt()) {
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr->environment());
        }
        break;
      case Token::SHL:
        __ shll_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
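    // Note (illustration): masking with 0x1F mirrors what the hardware
    // does for 32-bit shifts, where only the low five bits of the count
    // are used; a constant shift by 33 therefore behaves as a shift by 1.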
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sarl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr->environment());
        } else {
          __ shrl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shll(ToRegister(left), Immediate(shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ subl(ToRegister(left),
            Immediate(ToInteger32(LConstantOperand::cast(right))));
  } else if (right->IsRegister()) {
    __ subl(ToRegister(left), ToRegister(right));
  } else {
    __ subl(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), instr->value());
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  uint64_t int_val = BitCast<uint64_t, double>(v);
  // Use xor to produce +0.0 in a fast and compact way, but avoid doing
  // so if the constant is -0.0.
  if (int_val == 0) {
    __ xorps(res, res);
  } else {
    Register tmp = ToRegister(instr->TempAt(0));
    __ Set(tmp, int_val);
    __ movq(res, tmp);
  }
}
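
// Note (illustration): the bit pattern of -0.0 is 0x8000000000000000,
// which is non-zero, so only +0.0 takes the xorps shortcut; -0.0 goes
// through the movq path and keeps its sign bit.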


void LCodeGen::DoConstantT(LConstantT* instr) {
  Handle<Object> value = instr->value();
  if (value->IsSmi()) {
    __ Move(ToRegister(instr->result()), value);
  } else {
    __ LoadHeapObject(ToRegister(instr->result()),
                      Handle<HeapObject>::cast(value));
  }
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
}


void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result|.
  __ movq(result, FieldOperand(input, HeapObject::kMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte.
  __ movzxbq(result, FieldOperand(result, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ and_(result, Immediate(Map::kElementsKindMask));
  __ shr(result, Immediate(Map::kElementsKindShift));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  Label done;
  // If the object is a smi, return the object.
  __ JumpIfSmi(input, &done, Label::kNear);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister);
  __ j(not_equal, &done, Label::kNear);
  __ movq(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoDateField(LDateField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Smi* index = instr->index();
  Label runtime, done;
  ASSERT(object.is(result));
  ASSERT(object.is(rax));

#ifdef DEBUG
  __ AbortIfSmi(object);
  __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
  __ Assert(equal, "Trying to get date field from non-date.");
#endif

  if (index->value() == 0) {
    __ movq(result, FieldOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ movq(kScratchRegister, stamp);
      __ cmpq(kScratchRegister, FieldOperand(object,
                                             JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ movq(result, FieldOperand(object, JSDate::kValueOffset +
                                           kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2);
#ifdef _WIN64
    __ movq(rcx, object);
    __ movq(rdx, index, RelocInfo::NONE);
#else
    __ movq(rdi, object);
    __ movq(rsi, index, RelocInfo::NONE);
#endif
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ bind(&done);
  }
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}


void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToRegister(instr->InputAt(0)));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ addl(ToRegister(left),
            Immediate(ToInteger32(LConstantOperand::cast(right))));
  } else if (right->IsRegister()) {
    __ addl(ToRegister(left), ToRegister(right));
  } else {
    __ addl(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  XMMRegister left = ToDoubleRegister(instr->InputAt(0));
  XMMRegister right = ToDoubleRegister(instr->InputAt(1));
  XMMRegister result = ToDoubleRegister(instr->result());
  // All operations except MOD are computed in-place.
  ASSERT(instr->op() == Token::MOD || left.is(result));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(left, right);
      break;
    case Token::SUB:
      __ subsd(left, right);
      break;
    case Token::MUL:
      __ mulsd(left, right);
      break;
    case Token::DIV:
      __ divsd(left, right);
      break;
    case Token::MOD:
      __ PrepareCallCFunction(2);
      __ movaps(xmm0, left);
      ASSERT(right.is(xmm1));
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
      __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
      __ movaps(result, xmm0);
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
  ASSERT(ToRegister(instr->InputAt(1)).is(rax));
  ASSERT(ToRegister(instr->result()).is(rax));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    if (cc != always) {
      __ jmp(chunk_->GetAssemblyLabel(right_block));
    }
  }
}
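
// Illustration: when the true target is the next block in emission
// order, a single jump on the negated condition to the false target
// suffices and the true case falls through; only when neither target
// is next do we need both a conditional and an unconditional jump.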


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ testl(reg, reg);
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorps(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, equal);
    } else if (type.IsSmi()) {
      __ SmiCompare(reg, Smi::FromInt(0));
      EmitBranch(true_block, false_block, not_equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
      // Avoid deopts in the case where we've never executed this path before.
      if (expected.IsEmpty()) expected = ToBooleanStub::all_types();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
        __ j(equal, false_label);
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // true -> true.
        __ CompareRoot(reg, Heap::kTrueValueRootIndex);
        __ j(equal, true_label);
        // false -> false.
        __ CompareRoot(reg, Heap::kFalseValueRootIndex);
        __ j(equal, false_label);
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ CompareRoot(reg, Heap::kNullValueRootIndex);
        __ j(equal, false_label);
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all others -> true.
        __ Cmp(reg, Smi::FromInt(0));
        __ j(equal, false_label);
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ testb(reg, Immediate(kSmiTagMask));
        DeoptimizeIf(zero, instr->environment());
      }
   1443 
   1444       const Register map = kScratchRegister;
   1445       if (expected.NeedsMap()) {
   1446         __ movq(map, FieldOperand(reg, HeapObject::kMapOffset));
   1447 
   1448         if (expected.CanBeUndetectable()) {
   1449           // Undetectable -> false.
   1450           __ testb(FieldOperand(map, Map::kBitFieldOffset),
   1451                    Immediate(1 << Map::kIsUndetectable));
   1452           __ j(not_zero, false_label);
   1453         }
   1454       }
   1455 
   1456       if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
   1457         // spec object -> true.
   1458         __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
   1459         __ j(above_equal, true_label);
   1460       }
   1461 
   1462       if (expected.Contains(ToBooleanStub::STRING)) {
   1463         // String value -> false iff empty.
   1464         Label not_string;
   1465         __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
   1466         __ j(above_equal, &not_string, Label::kNear);
   1467         __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0));
   1468         __ j(not_zero, true_label);
   1469         __ jmp(false_label);
   1470         __ bind(&not_string);
   1471       }
   1472 
   1473       if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
   1474         // heap number -> false iff +0, -0, or NaN.
   1475         Label not_heap_number;
   1476         __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
   1477         __ j(not_equal, &not_heap_number, Label::kNear);
   1478         __ xorps(xmm0, xmm0);
   1479         __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
   1480         __ j(zero, false_label);
   1481         __ jmp(true_label);
   1482         __ bind(&not_heap_number);
   1483       }
   1484 
   1485       // We've seen something for the first time -> deopt.
   1486       DeoptimizeIf(no_condition, instr->environment());
   1487     }
   1488   }
   1489 }
   1490 
   1491 
   1492 void LCodeGen::EmitGoto(int block) {
   1493   block = chunk_->LookupDestination(block);
   1494   int next_block = GetNextEmittedBlock(current_block_);
   1495   if (block != next_block) {
   1496     __ jmp(chunk_->GetAssemblyLabel(block));
   1497   }
   1498 }
   1499 
   1500 
   1501 void LCodeGen::DoGoto(LGoto* instr) {
   1502   EmitGoto(instr->block_id());
   1503 }
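

// Maps a comparison token to the x64 condition to test after a compare.
// Signed conditions are used for Integer32 inputs and the unsigned ones
// for doubles, since ucomisd sets the flags as for an unsigned compare.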
inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = no_condition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = equal;
      break;
    case Token::LT:
      cond = is_unsigned ? below : less;
      break;
    case Token::GT:
      cond = is_unsigned ? above : greater;
      break;
    case Token::LTE:
      cond = is_unsigned ? below_equal : less_equal;
      break;
    case Token::GTE:
      cond = is_unsigned ? above_equal : greater_equal;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}
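

// Compares two Integer32 or double inputs and branches. A comparison of
// two constants is evaluated statically and becomes a goto. When only the
// left operand is a constant, the compare is emitted with the operands
// transposed and the condition reversed, so the immediate ends up on the
// right. For doubles, an unordered result (a NaN input, signalled by the
// parity flag) goes straight to the false block.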
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  Condition cc = TokenToCondition(instr->op(), instr->is_double());

  if (left->IsConstantOperand() && right->IsConstantOperand()) {
    // We can statically evaluate the comparison.
    double left_val = ToDouble(LConstantOperand::cast(left));
    double right_val = ToDouble(LConstantOperand::cast(right));
    int next_block =
      EvalComparison(instr->op(), left_val, right_val) ? true_block
                                                       : false_block;
    EmitGoto(next_block);
  } else {
    if (instr->is_double()) {
      // Don't base result on EFLAGS when a NaN is involved. Instead
      // jump to the false block.
      __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
      __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
    } else {
      int32_t value;
      if (right->IsConstantOperand()) {
        value = ToInteger32(LConstantOperand::cast(right));
        __ cmpl(ToRegister(left), Immediate(value));
      } else if (left->IsConstantOperand()) {
        value = ToInteger32(LConstantOperand::cast(left));
        if (right->IsRegister()) {
          __ cmpl(ToRegister(right), Immediate(value));
        } else {
          __ cmpl(ToOperand(right), Immediate(value));
        }
        // We transposed the operands. Reverse the condition.
        cc = ReverseCondition(cc);
      } else {
        if (right->IsRegister()) {
          __ cmpl(ToRegister(left), ToRegister(right));
        } else {
          __ cmpl(ToRegister(left), ToOperand(right));
        }
      }
    }
    EmitBranch(true_block, false_block, cc);
  }
}


void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmpq(left, right);
  EmitBranch(true_block, false_block, equal);
}


void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ cmpq(left, Immediate(instr->hydrogen()->right()));
  EmitBranch(true_block, false_block, equal);
}
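

// Branches on a comparison against null or undefined. Under strict
// equality only the exact nil value matches; otherwise, following the
// semantics of '==', the other nil value and undetectable objects are
// accepted as well.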
void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  // If the expression is known to be untagged or a smi, then it's definitely
  // not null, and it can't be an undetectable object.
  if (instr->hydrogen()->representation().IsSpecialization() ||
      instr->hydrogen()->type().IsSmi()) {
    EmitGoto(false_block);
    return;
  }

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  Heap::RootListIndex nil_value = instr->nil() == kNullValue ?
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ CompareRoot(reg, nil_value);
  if (instr->kind() == kStrictEquality) {
    EmitBranch(true_block, false_block, equal);
  } else {
    Heap::RootListIndex other_nil_value = instr->nil() == kNullValue ?
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ j(equal, true_label);
    __ CompareRoot(reg, other_nil_value);
    __ j(equal, true_label);
    __ JumpIfSmi(reg, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = ToRegister(instr->TempAt(0));
    __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, not_zero);
  }
}
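

// Tests whether the input is a non-callable JS object. null counts as an
// object, smis and undetectable objects do not, and the instance type has
// to lie in the non-callable spec object range. Jumps on the early-out
// cases and returns the condition to branch on for the final instance
// type check.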
Condition LCodeGen::EmitIsObject(Register input,
                                 Label* is_not_object,
                                 Label* is_object) {
  ASSERT(!input.is(kScratchRegister));

  __ JumpIfSmi(input, is_not_object);

  __ CompareRoot(input, Heap::kNullValueRootIndex);
  __ j(equal, is_object);

  __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, is_not_object);

  __ movzxbl(kScratchRegister,
             FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
  __ cmpb(kScratchRegister, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ j(below, is_not_object);
  __ cmpb(kScratchRegister, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  return below_equal;
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond = EmitIsObject(reg, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}


Condition LCodeGen::EmitIsString(Register input,
                                 Register temp1,
                                 Label* is_not_string) {
  __ JumpIfSmi(input, is_not_string);
  Condition cond = masm_->IsObjectStringType(input, temp1, temp1);

  return cond;
}


void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond = EmitIsString(reg, temp, false_label);

  EmitBranch(true_block, false_block, true_cond);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Condition is_smi;
  if (instr->InputAt(0)->IsRegister()) {
    Register input = ToRegister(instr->InputAt(0));
    is_smi = masm()->CheckSmi(input);
  } else {
    Operand input = ToOperand(instr->InputAt(0));
    is_smi = masm()->CheckSmi(input);
  }
  EmitBranch(true_block, false_block, is_smi);
}


void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
  __ movq(temp, FieldOperand(input, HeapObject::kMapOffset));
  __ testb(FieldOperand(temp, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsUndetectable));
  EmitBranch(true_block, false_block, not_zero);
}


void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = TokenToCondition(op, false);
  __ testq(rax, rax);

  EmitBranch(true_block, false_block, condition);
}
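

// HHasInstanceTypeAndBranch tests a [from, to] instance type interval.
// The helpers below fold the interval into a single compare: a range
// ending at LAST_TYPE compares against 'from' with above_equal, one
// starting at FIRST_TYPE compares against 'to' with below_equal, and a
// single type compares for equality.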
static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return equal;
  if (to == LAST_TYPE) return above_equal;
  if (from == FIRST_TYPE) return below_equal;
  UNREACHABLE();
  return equal;
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ JumpIfSmi(input, false_label);

  __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}


void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    __ AbortIfNotString(input);
  }

  __ movl(result, FieldOperand(input, String::kHashFieldOffset));
  ASSERT(String::kHashShift >= kSmiTagSize);
  __ IndexFromHash(result, result);
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ testl(FieldOperand(input, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, equal);
}


// Branches to a label or falls through with the answer in the z flag.
// Trashes the temp registers.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!input.is(temp2));
  ASSERT(!temp.is(temp2));

  __ JumpIfSmi(input, is_false);

  if (class_name->IsEqualTo(CStrVector("Function"))) {
    // Assuming the following assertions, we can use the same compares to test
    // for both being a function type and being in the object type range.
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                  FIRST_SPEC_OBJECT_TYPE + 1);
    STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                  LAST_SPEC_OBJECT_TYPE - 1);
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
    __ j(below, is_false);
    __ j(equal, is_true);
    __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE);
    __ j(equal, is_true);
  } else {
    // Faster code path to avoid two compares: subtract lower bound from the
    // actual type and do a signed compare with the width of the type range.
    __ movq(temp, FieldOperand(input, HeapObject::kMapOffset));
    __ movzxbl(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ subq(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ cmpq(temp2, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
                             FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ j(above, is_false);
  }

  // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
  // Check if the constructor in the map is a function.
  __ movq(temp, FieldOperand(temp, Map::kConstructorOffset));

  // Objects with a non-function constructor have class 'Object'.
  __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ j(not_equal, is_true);
  } else {
    __ j(not_equal, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ movq(temp, FieldOperand(temp,
                             SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted.  This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax.  Since both sides are symbols it is sufficient to use an identity
  // comparison.
  ASSERT(class_name->IsSymbol());
  __ Cmp(temp, class_name);
  // End with the answer in the z flag.
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, equal);
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
  EmitBranch(true_block, false_block, equal);
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  InstanceofStub stub(InstanceofStub::kNoFlags);
  __ push(ToRegister(instr->InputAt(0)));
  __ push(ToRegister(instr->InputAt(1)));
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  Label true_value, done;
  __ testq(rax, rax);
  __ j(zero, &true_value, Label::kNear);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ bind(&done);
}
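

// Computes "object instanceof function" for a function known at compile
// time, using an inlined one-entry cache at the call site: the object's
// map is compared against a patchable cell and the cached result is
// loaded by a patchable LoadRoot. Both start out as the hole value and
// are patched by the instanceof stub from the deferred code below. Smis,
// null, and strings are never an instance of anything and go straight to
// the false result.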
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
    }
    virtual LInstruction* instr() { return instr_; }
    Label* map_check() { return &map_check_; }
   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };


  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));

  // A Smi is not an instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurrences of the
  // hole value will be patched to the last map/result pair generated by the
  // instanceof stub.
  Label cache_miss;
  // Use a temp register to avoid memory operands with variable lengths.
  Register map = ToRegister(instr->TempAt(0));
  __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  Handle<JSGlobalPropertyCell> cache_cell =
      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
  __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL);
  __ cmpq(map, Operand(kScratchRegister, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Patched to load either true or false.
  __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
#ifdef DEBUG
  // Check that the code size between patch label and patch sites is invariant.
  Label end_of_patched_code;
  __ bind(&end_of_patched_code);
  ASSERT(true);
#endif
  __ jmp(&done);

  // The inlined call site cache did not match. Check for null and string
  // before calling the deferred code.
  __ bind(&cache_miss);  // Null is not an instance of anything.
  __ CompareRoot(object, Heap::kNullValueRootIndex);
  __ j(equal, &false_result, Label::kNear);

  // String values are not instances of anything.
  __ JumpIfNotString(object, kScratchRegister, deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);

  __ bind(deferred->exit());
  __ bind(&done);
}
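

// Slow path for the inlined instanceof check above. Calls the instanceof
// stub with kCallSiteInlineCheck; in addition to the two stub arguments it
// pushes the offset from the stub's return address back to the inlined map
// check, so the stub can locate and patch the inlined cache.
// kAdditionalDelta accounts for the code emitted between computing the
// delta and the actual call.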
void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                               Label* map_check) {
  {
    PushSafepointRegistersScope scope(this);
    InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
        InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
    InstanceofStub stub(flags);

    __ push(ToRegister(instr->InputAt(0)));
    __ PushHeapObject(instr->function());

    static const int kAdditionalDelta = 10;
    int delta =
        masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
    ASSERT(delta >= 0);
    __ push_imm32(delta);

    // We are pushing three values on the stack but recording a
    // safepoint with two arguments because the stub is going to
    // remove the third argument from the stack before jumping
    // to the instanceof builtin on the slow path.
    CallCodeGeneric(stub.GetCode(),
                    RelocInfo::CODE_TARGET,
                    instr,
                    RECORD_SAFEPOINT_WITH_REGISTERS,
                    2);
    ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
    ASSERT(instr->HasDeoptimizationEnvironment());
    LEnvironment* env = instr->deoptimization_environment();
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
    // Move result to a register that survives the end of the
    // PushSafepointRegisterScope.
    __ movq(kScratchRegister, rax);
  }
  __ testq(kScratchRegister, kScratchRegister);
  Label load_false;
  Label done;
  __ j(not_zero, &load_false);
  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(&load_false);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = TokenToCondition(op, false);
  Label true_value, done;
  __ testq(rax, rax);
  __ j(condition, &true_value, Label::kNear);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Preserve the return value on the stack and rely on the runtime
    // call to return the value in the same register.
    __ push(rax);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  __ movq(rsp, rbp);
  __ pop(rbp);
  __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
}


void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ LoadGlobalCell(result, instr->hydrogen()->cell());
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());
  }
}


void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(rax));
  ASSERT(ToRegister(instr->result()).is(rax));

  __ Move(rcx, instr->name());
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
                                               RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);
}


void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->value());
  Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell();

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted. We deoptimize in that case.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    // We have a temp because CompareRoot might clobber kScratchRegister.
    Register cell = ToRegister(instr->TempAt(0));
    ASSERT(!value.is(cell));
    __ movq(cell, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
    __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());
    // Store the value.
    __ movq(Operand(cell, 0), value);
  } else {
    // Store the value.
    __ movq(kScratchRegister, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
    __ movq(Operand(kScratchRegister, 0), value);
  }
  // Cells are always rescanned, so no write barrier here.
}


void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(rdx));
  ASSERT(ToRegister(instr->value()).is(rax));

  __ Move(rcx, instr->name());
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}


void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ movq(result, ContextOperand(context, instr->slot_index()));
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(equal, instr->environment());
    } else {
      Label is_not_hole;
      __ j(not_equal, &is_not_hole, Label::kNear);
      __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
      __ bind(&is_not_hole);
    }
  }
}


void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());

  Operand target = ContextOperand(context, instr->slot_index());

  Label skip_assignment;
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(target, Heap::kTheHoleValueRootIndex);
    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(equal, instr->environment());
    } else {
      __ j(not_equal, &skip_assignment);
    }
  }
  __ movq(target, value);

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    HType type = instr->hydrogen()->value()->type();
    SmiCheck check_needed =
        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    int offset = Context::SlotOffset(instr->slot_index());
    Register scratch = ToRegister(instr->TempAt(0));
    __ RecordWriteContextSlot(context,
                              offset,
                              value,
                              scratch,
                              kSaveFPRegs,
                              EMIT_REMEMBERED_SET,
                              check_needed);
  }

  __ bind(&skip_assignment);
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
  } else {
    __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
    __ movq(result, FieldOperand(result, instr->hydrogen()->offset()));
  }
}


void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name) {
  LookupResult lookup(isolate());
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsFound() &&
         (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
  if (lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ movq(result, FieldOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
      __ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else {
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    __ LoadHeapObject(result, function);
  }
}


void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());

  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();

  if (map_count == 0) {
    ASSERT(instr->hydrogen()->need_generic());
    __ Move(rcx, instr->hydrogen()->name());
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
      __ j(not_equal, &next, Label::kNear);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ jmp(&done, Label::kNear);
      __ bind(&next);
    }
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ j(not_equal, &generic, Label::kNear);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ jmp(&done, Label::kNear);
      __ bind(&generic);
      __ Move(rcx, instr->hydrogen()->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(not_equal, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rax));
  ASSERT(ToRegister(instr->result()).is(rax));

  __ Move(rcx, instr->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function.
  __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
  DeoptimizeIf(not_equal, instr->environment());

  // Check whether the function has an instance prototype.
  Label non_instance;
  __ testb(FieldOperand(result, Map::kBitFieldOffset),
           Immediate(1 << Map::kHasNonInstancePrototype));
  __ j(not_zero, &non_instance, Label::kNear);

  // Get the prototype or initial map from the function.
  __ movq(result,
         FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(equal, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
  __ j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  __ movq(result, FieldOperand(result, Map::kPrototypeOffset));
  __ jmp(&done, Label::kNear);

  // Non-instance prototype: Fetch prototype from constructor field
  // in the function's map.
  __ bind(&non_instance);
  __ movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}


void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done, ok, fail;
    __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
                   Heap::kFixedArrayMapRootIndex);
    __ j(equal, &done, Label::kNear);
    __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
                   Heap::kFixedCOWArrayMapRootIndex);
    __ j(equal, &done, Label::kNear);
    Register temp((result.is(rax)) ? rbx : rax);
    __ push(temp);
    __ movq(temp, FieldOperand(result, HeapObject::kMapOffset));
    __ movzxbq(temp, FieldOperand(temp, Map::kBitField2Offset));
    __ and_(temp, Immediate(Map::kElementsKindMask));
    __ shr(temp, Immediate(Map::kElementsKindShift));
    __ cmpl(temp, Immediate(FAST_ELEMENTS));
    __ j(equal, &ok, Label::kNear);
    __ cmpl(temp, Immediate(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ j(less, &fail, Label::kNear);
    __ cmpl(temp, Immediate(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ j(less_equal, &ok, Label::kNear);
    __ bind(&fail);
    __ Abort("Check for fast or external elements failed");
    __ bind(&ok);
    __ pop(temp);
    __ bind(&done);
  }
}


void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(input,
                               ExternalPixelArray::kExternalPointerOffset));
}


void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register result = ToRegister(instr->result());

  if (instr->index()->IsRegister()) {
    __ subl(length, ToRegister(instr->index()));
  } else {
    __ subl(length, ToOperand(instr->index()));
  }
  DeoptimizeIf(below_equal, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting the index from length accounts for one of them; add one more.
  __ movq(result, Operand(arguments, length, times_pointer_size, kPointerSize));
}


void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register result = ToRegister(instr->result());

  // Load the result.
  __ movq(result,
          BuildFastArrayOperand(instr->elements(), instr->key(),
                                FAST_ELEMENTS,
                                FixedArray::kHeaderSize - kHeapObjectTag));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());
  }
}
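

// Loads a double from a FixedDoubleArray. The hole is a NaN with a bit
// pattern that ordinary arithmetic does not produce, so comparing just the
// upper 32 bits of the element against kHoleNanUpper32 suffices; the hole
// check operand below skips sizeof(kHoleNanLower32) bytes for that reason.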
void LCodeGen::DoLoadKeyedFastDoubleElement(
    LLoadKeyedFastDoubleElement* instr) {
  XMMRegister result(ToDoubleRegister(instr->result()));

  int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
      sizeof(kHoleNanLower32);
  Operand hole_check_operand = BuildFastArrayOperand(
      instr->elements(),
      instr->key(),
      FAST_DOUBLE_ELEMENTS,
      offset);
  __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
  DeoptimizeIf(equal, instr->environment());

  Operand double_load_operand = BuildFastArrayOperand(
      instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
      FixedDoubleArray::kHeaderSize - kHeapObjectTag);
  __ movsd(result, double_load_operand);
}
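

// Builds the memory operand for a keyed element access. Constant keys are
// folded into the displacement, constant * (1 << shift_size) + offset
// (e.g. 8 bytes per element for FAST_ELEMENTS on x64); constants too large
// for that computation abort code generation. Register keys use
// scaled-index addressing with the element shift size as the scale.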
Operand LCodeGen::BuildFastArrayOperand(
    LOperand* elements_pointer,
    LOperand* key,
    ElementsKind elements_kind,
    uint32_t offset) {
  Register elements_pointer_reg = ToRegister(elements_pointer);
  int shift_size = ElementsKindToShiftSize(elements_kind);
  if (key->IsConstantOperand()) {
    int constant_value = ToInteger32(LConstantOperand::cast(key));
    if (constant_value & 0xF0000000) {
      Abort("array index constant value too big");
    }
    return Operand(elements_pointer_reg,
                   constant_value * (1 << shift_size) + offset);
  } else {
    ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
    return Operand(elements_pointer_reg, ToRegister(key),
                   scale_factor, offset);
  }
}


void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  Operand operand(BuildFastArrayOperand(instr->external_pointer(),
                                        instr->key(), elements_kind, 0));
  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
    XMMRegister result(ToDoubleRegister(instr->result()));
    __ movss(result, operand);
    __ cvtss2sd(result, result);
  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    __ movsd(ToDoubleRegister(instr->result()), operand);
  } else {
    Register result(ToRegister(instr->result()));
    switch (elements_kind) {
      case EXTERNAL_BYTE_ELEMENTS:
        __ movsxbq(result, operand);
        break;
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
      case EXTERNAL_PIXEL_ELEMENTS:
        __ movzxbq(result, operand);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
        __ movsxwq(result, operand);
        break;
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ movzxwq(result, operand);
        break;
      case EXTERNAL_INT_ELEMENTS:
        __ movsxlq(result, operand);
        break;
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ movl(result, operand);
        __ testl(result, result);
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generating code that returns a double rather than an
        // int.
        DeoptimizeIf(negative, instr->environment());
        break;
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_SMI_ONLY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rdx));
  ASSERT(ToRegister(instr->key()).is(rax));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register result = ToRegister(instr->result());

  // Check for arguments adaptor frame.
  Label done, adapted;
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adapted, Label::kNear);

  // No arguments adaptor frame.
  __ movq(result, rbp);
  __ jmp(&done, Label::kNear);

  // Arguments adaptor frame present.
  __ bind(&adapted);
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ bind(&done);
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register result = ToRegister(instr->result());

  Label done;

  // If there is no arguments adaptor frame, the number of arguments is fixed.
  if (instr->InputAt(0)->IsRegister()) {
    __ cmpq(rbp, ToRegister(instr->InputAt(0)));
  } else {
    __ cmpq(rbp, ToOperand(instr->InputAt(0)));
  }
  __ movl(result, Immediate(scope()->num_parameters()));
  __ j(equal, &done, Label::kNear);

  // Arguments adaptor frame present. Get argument length from there.
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiToInteger32(result,
                    Operand(result,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Argument length is in result register.
  __ bind(&done);
}


void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());

  // If the receiver is null or undefined, we have to pass the global
  // object as a receiver to normal functions. Values have to be
  // passed unchanged to builtins and strict-mode functions.
  Label global_object, receiver_ok;

  // Do not transform the receiver to object for strict mode
  // functions.
  __ movq(kScratchRegister,
          FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ testb(FieldOperand(kScratchRegister,
                        SharedFunctionInfo::kStrictModeByteOffset),
           Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
  __ j(not_equal, &receiver_ok, Label::kNear);

  // Do not transform the receiver to object for builtins.
  __ testb(FieldOperand(kScratchRegister,
                        SharedFunctionInfo::kNativeByteOffset),
           Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
  __ j(not_equal, &receiver_ok, Label::kNear);

  // Normal function. Replace undefined or null with global receiver.
  __ CompareRoot(receiver, Heap::kNullValueRootIndex);
  __ j(equal, &global_object, Label::kNear);
  __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
  __ j(equal, &global_object, Label::kNear);

  // The receiver should be a JS object.
  Condition is_smi = __ CheckSmi(receiver);
  DeoptimizeIf(is_smi, instr->environment());
  __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
  DeoptimizeIf(below, instr->environment());
  __ jmp(&receiver_ok, Label::kNear);

  __ bind(&global_object);
  // TODO(kmillikin): We have a hydrogen value for the global object.  See
  // if it's better to use it than to explicitly fetch it from the context
  // here.
  __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_INDEX));
  __ movq(receiver,
          FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
  __ bind(&receiver_ok);
}


void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  ASSERT(receiver.is(rax));  // Used for parameter count.
  ASSERT(function.is(rdi));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(rax));

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmpq(length, Immediate(kArgumentsLimit));
  DeoptimizeIf(above, instr->environment());

  __ push(receiver);
  __ movq(receiver, length);

  // Loop through the arguments pushing them onto the execution
  // stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ testl(length, length);
  __ j(zero, &invoke, Label::kNear);
  __ bind(&loop);
  __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
  __ decl(length);
  __ j(not_zero, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  ParameterCount actual(rax);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  EmitPushTaggedOperand(argument);
}


void LCodeGen::DoThisFunction(LThisFunction* instr) {
  Register result = ToRegister(instr->result());
  __ LoadHeapObject(result, instr->hydrogen()->closure());
}


void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ movq(result, rsi);
}


void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ movq(result,
          Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
}


void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
  __ push(rsi);  // The context is the first argument.
  __ PushHeapObject(instr->hydrogen()->pairs());
  __ Push(Smi::FromInt(instr->hydrogen()->flags()));
  CallRuntime(Runtime::kDeclareGlobals, 3, instr);
}


void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register result = ToRegister(instr->result());
  __ movq(result, GlobalObjectOperand());
}


void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());
  __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
}
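

// Calls a function whose JSFunction is known at compile time. If the
// arity matches the formal parameter count (or no adaption is needed),
// the function's code entry is called directly, after switching to the
// callee's context when it may differ from the current one; calling the
// closure being compiled goes through CallSelf. Otherwise the call is
// routed through InvokeFunction and the arguments adaptor.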
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
      function->shared()->formal_parameter_count() == arity;

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  if (can_invoke_directly) {
    __ LoadHeapObject(rdi, function);

    // Change context if needed.
    bool change_context =
        (info()->closure()->context() != function->context()) ||
        scope()->contains_with() ||
        (scope()->num_heap_slots() > 0);
    if (change_context) {
      __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
    }

    // Set rax to arguments count if adaption is not needed. Assumes that rax
    // is available to write to at this point.
    if (!function->NeedsArgumentsAdaption()) {
      __ Set(rax, arity);
    }

    // Invoke function.
    __ SetCallKind(rcx, call_kind);
    if (*function == *info()->closure()) {
      __ CallSelf();
    } else {
      __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    }

    // Set up deoptimization.
    RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
  } else {
    // We need to adapt arguments.
    SafepointGenerator generator(
        this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(arity);
    __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
  }

  // Restore context.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  CallKnownFunction(instr->function(),
                    instr->arity(),
                    instr,
                    CALL_AS_METHOD);
}


void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  DeoptimizeIf(not_equal, instr->environment());

  Label done;
  Register tmp = input_reg.is(rax) ? rcx : rax;
  Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this);

  Label negative;
  __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| will be restored
  // unchanged by popping safepoint registers.
  __ testl(tmp, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative);
  __ jmp(&done);

  __ bind(&negative);

  Label allocated, slow;
  __ AllocateHeapNumber(tmp, tmp2, &slow);
  __ jmp(&allocated);

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  // Set the pointer to the new heap number in tmp.
  if (!tmp.is(rax)) {
    __ movq(tmp, rax);
  }

  // Restore input_reg after call to runtime.
  __ LoadFromSafepointRegisterSlot(input_reg, input_reg);

  __ bind(&allocated);
  __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
   2787   __ shl(tmp2, Immediate(1));
   2788   __ shr(tmp2, Immediate(1));
   2789   __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
   2790   __ StoreToSafepointRegisterSlot(input_reg, tmp);
   2791 
   2792   __ bind(&done);
   2793 }
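

// A minimal illustration-only sketch (not part of the code generator) of
// the shl/shr pair above: shifting the raw IEEE-754 bits of a double left
// by one and then logically right by one clears the sign bit, which is all
// Math.abs has to do to a heap number's payload. The helper name is local
// to this sketch.
static inline uint64_t SketchClearSignBit(uint64_t double_bits) {
  // The sign is the topmost bit; the left shift discards it and the
  // logical right shift fills the vacated position with a zero.
  return (double_bits << 1) >> 1;
}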


void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  __ testl(input_reg, input_reg);
  Label is_positive;
  __ j(not_sign, &is_positive);
  __ negl(input_reg);  // Sets flags.
  DeoptimizeIf(negative, instr->environment());
  __ bind(&is_positive);
}
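

// Illustration-only sketch of why EmitIntegerMathAbs deoptimizes when negl
// leaves the sign flag set: the most negative 32-bit integer negates to
// itself, so its absolute value is not representable as an int32 and the
// tagged (heap number) path has to take over. The helper is hypothetical.
static inline bool SketchInt32AbsOverflows(int32_t value) {
  // Only -2^31 has no int32 absolute value; negating it wraps around.
  return value == static_cast<int32_t>(0x80000000u);
}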


void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
    virtual LInstruction* instr() { return instr_; }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();

  if (r.IsDouble()) {
    XMMRegister scratch = xmm0;
    XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
    __ xorps(scratch, scratch);
    __ subsd(scratch, input_reg);
    __ andpd(input_reg, scratch);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {  // Tagged case.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input_reg = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input_reg, deferred->entry());
    __ SmiToInteger32(input_reg, input_reg);
    EmitIntegerMathAbs(instr);
    __ Integer32ToSmi(input_reg, input_reg);
    __ bind(deferred->exit());
  }
}


void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Label done;

  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatures::Scope scope(SSE4_1);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Deoptimize if minus zero.
      __ movq(output_reg, input_reg);
      __ subq(output_reg, Immediate(1));
      DeoptimizeIf(overflow, instr->environment());
    }
    __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
    __ cvttsd2si(output_reg, xmm_scratch);
    __ cmpl(output_reg, Immediate(0x80000000));
    DeoptimizeIf(equal, instr->environment());
  } else {
    // Deoptimize on negative inputs.
    __ xorps(xmm_scratch, xmm_scratch);  // Zero the register.
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below, instr->environment());
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Check for negative zero.
      Label positive_sign;
      __ j(above, &positive_sign, Label::kNear);
      __ movmskpd(output_reg, input_reg);
      __ testq(output_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr->environment());
      __ Set(output_reg, 0);
      __ jmp(&done);
      __ bind(&positive_sign);
    }

    // Use truncating instruction (OK because input is positive).
    __ cvttsd2si(output_reg, input_reg);

    // Overflow is signalled with minint.
    __ cmpl(output_reg, Immediate(0x80000000));
    DeoptimizeIf(equal, instr->environment());
  }
  __ bind(&done);
}
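

// Sketch (illustration only) of the minus-zero test used in the non-SSE4.1
// path above: -0.0 compares equal to +0.0, so after ucomisd the only way
// to distinguish the two is the sign bit, which movmskpd copies into a
// general-purpose register. A plain C++ equivalent, assuming memcpy is
// available and doubles use the usual IEEE-754 layout:
static inline bool SketchIsMinusZero(double value) {
  if (value != 0.0) return false;  // The comparison cannot see a zero's sign.
  uint64_t bits;
  memcpy(&bits, &value, sizeof(bits));  // Reinterpret the double's bits.
  return (bits >> 63) != 0;  // A set sign bit on a zero means -0.0.
}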


void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  const XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));

  Label done;
  // xmm_scratch = 0.5
  __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
  __ movq(xmm_scratch, kScratchRegister);
  Label below_half;
  __ ucomisd(xmm_scratch, input_reg);
  // If input_reg is NaN, this doesn't jump.
  __ j(above, &below_half, Label::kNear);
  // input = input + 0.5
  // This addition might give a result that isn't correct for
  // rounding, due to loss of precision, but only for a number that's
  // so big that the conversion below will overflow anyway.
  __ addsd(xmm_scratch, input_reg);
  // Compute Math.floor(input).
  // Use truncating instruction (OK because input is positive).
  __ cvttsd2si(output_reg, xmm_scratch);
  // Overflow is signalled with minint.
  __ cmpl(output_reg, Immediate(0x80000000));
  DeoptimizeIf(equal, instr->environment());
  __ jmp(&done);

  __ bind(&below_half);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if negative (including -0).
    __ movq(output_reg, input_reg);
    __ testq(output_reg, output_reg);
    DeoptimizeIf(negative, instr->environment());
  } else {
    // Bail out if below -0.5; otherwise round to (positive) zero, even
    // if negative.
    // xmm_scratch = -0.5
    __ movq(kScratchRegister, V8_INT64_C(0xBFE0000000000000), RelocInfo::NONE);
    __ movq(xmm_scratch, kScratchRegister);
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below, instr->environment());
  }
  __ xorl(output_reg, output_reg);

  __ bind(&done);
}
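

// Illustration-only sketch of the fast path above: 0x3FE0000000000000 and
// 0xBFE0000000000000 are the IEEE-754 bit patterns of 0.5 and -0.5, and
// rounding to nearest is implemented as "add 0.5, then truncate toward
// zero". The sketch assumes an input that survives the guards in
// DoMathRound (at least -0.5 and small enough to fit in an int32).
static inline int32_t SketchRoundToInt32(double input) {
  // Truncation after adding 0.5 rounds halfway cases up, matching
  // Math.round for the inputs described above.
  return static_cast<int32_t>(input + 0.5);
}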


void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
  __ sqrtsd(input_reg, input_reg);
}


void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));

  // Note that according to ECMA-262 15.8.2.13:
  // Math.pow(-Infinity, 0.5) == Infinity
  // Math.sqrt(-Infinity) == NaN
  Label done, sqrt;
  // Check base for -Infinity.  According to IEEE-754, double-precision
  // -Infinity has the highest 12 bits set and the lowest 52 bits cleared.
  __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE);
  __ movq(xmm_scratch, kScratchRegister);
  __ ucomisd(xmm_scratch, input_reg);
  // Comparing -Infinity with NaN results in "unordered", which sets the
  // zero flag as if both were equal.  However, it also sets the carry flag.
  __ j(not_equal, &sqrt, Label::kNear);
  __ j(carry, &sqrt, Label::kNear);
  // If input is -Infinity, return Infinity.
  __ xorps(input_reg, input_reg);
  __ subsd(input_reg, xmm_scratch);
  __ jmp(&done, Label::kNear);

  // Square root.
  __ bind(&sqrt);
  __ xorps(xmm_scratch, xmm_scratch);
  __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
  __ sqrtsd(input_reg, input_reg);
  __ bind(&done);
}
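

// Sketch (illustration only) of the ECMA-262 corner case the code above
// guards against, written with the C library's sqrt (assumed available):
// Math.pow(-Infinity, 0.5) must be +Infinity even though sqrt(-Infinity)
// is NaN, and adding +0.0 first maps a -0 base to +0 before the root.
static inline double SketchPowHalf(double base) {
  if (base == -V8_INFINITY) return V8_INFINITY;  // ES5 15.8.2.13 special case.
  return sqrt(base + 0.0);  // +0.0 turns -0 into +0; sqrt(+0) is +0.
}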


void LCodeGen::DoPower(LPower* instr) {
  Representation exponent_type = instr->hydrogen()->right()->representation();
  // Having marked this as a call, we can use any registers.
  // Just make sure that the input/output registers are the expected ones.

  // Choose register conforming to calling convention (when bailing out).
#ifdef _WIN64
  Register exponent = rdx;
#else
  Register exponent = rdi;
#endif
  ASSERT(!instr->InputAt(1)->IsRegister() ||
         ToRegister(instr->InputAt(1)).is(exponent));
  ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
         ToDoubleRegister(instr->InputAt(1)).is(xmm1));
  ASSERT(ToDoubleRegister(instr->InputAt(0)).is(xmm2));
  ASSERT(ToDoubleRegister(instr->result()).is(xmm3));

  if (exponent_type.IsTagged()) {
    Label no_deopt;
    __ JumpIfSmi(exponent, &no_deopt);
    __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx);
    DeoptimizeIf(not_equal, instr->environment());
    __ bind(&no_deopt);
    MathPowStub stub(MathPowStub::TAGGED);
    __ CallStub(&stub);
  } else if (exponent_type.IsInteger32()) {
    MathPowStub stub(MathPowStub::INTEGER);
    __ CallStub(&stub);
  } else {
    ASSERT(exponent_type.IsDouble());
    MathPowStub stub(MathPowStub::DOUBLE);
    __ CallStub(&stub);
  }
}


void LCodeGen::DoRandom(LRandom* instr) {
  class DeferredDoRandom: public LDeferredCode {
   public:
    DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LRandom* instr_;
  };

  DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);

  // Having marked this instruction as a call, we can use any
  // registers.
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));

  // Choose the right register for the first argument depending on
  // the calling convention.
#ifdef _WIN64
  ASSERT(ToRegister(instr->InputAt(0)).is(rcx));
  Register global_object = rcx;
#else
  ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
  Register global_object = rdi;
#endif

  static const int kSeedSize = sizeof(uint32_t);
  STATIC_ASSERT(kPointerSize == 2 * kSeedSize);

  __ movq(global_object,
          FieldOperand(global_object, GlobalObject::kGlobalContextOffset));
  static const int kRandomSeedOffset =
      FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
  __ movq(rbx, FieldOperand(global_object, kRandomSeedOffset));
  // rbx: FixedArray of the global context's random seeds.

  // Load state[0].
  __ movl(rax, FieldOperand(rbx, ByteArray::kHeaderSize));
  // If state[0] == 0, call runtime to initialize seeds.
  __ testl(rax, rax);
  __ j(zero, deferred->entry());
  // Load state[1].
  __ movl(rcx, FieldOperand(rbx, ByteArray::kHeaderSize + kSeedSize));

  // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
  // Only operate on the lower 32 bits of rax.
  __ movl(rdx, rax);
  __ andl(rdx, Immediate(0xFFFF));
  __ imull(rdx, rdx, Immediate(18273));
  __ shrl(rax, Immediate(16));
  __ addl(rax, rdx);
  // Save state[0].
  __ movl(FieldOperand(rbx, ByteArray::kHeaderSize), rax);

  // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
  __ movl(rdx, rcx);
  __ andl(rdx, Immediate(0xFFFF));
  __ imull(rdx, rdx, Immediate(36969));
  __ shrl(rcx, Immediate(16));
  __ addl(rcx, rdx);
  // Save state[1].
  __ movl(FieldOperand(rbx, ByteArray::kHeaderSize + kSeedSize), rcx);

  // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
  __ shll(rax, Immediate(14));
  __ andl(rcx, Immediate(0x3FFFF));
  __ addl(rax, rcx);

  __ bind(deferred->exit());
  // Convert 32 random bits in rax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
  __ movl(rcx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
  __ movd(xmm2, rcx);
  __ movd(xmm1, rax);
  __ cvtss2sd(xmm2, xmm2);
  __ xorps(xmm1, xmm2);
  __ subsd(xmm1, xmm2);
}
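

// Illustration-only C++ sketch of the scheme implemented above: two 16-bit
// multiply-with-carry generators are stepped and mixed into 32 random
// bits, which are then turned into a double in [0, 1) by writing them into
// the low half of the significand of 1.0 x 2^20 and subtracting 1.0 x 2^20
// again. Constants mirror the comments above; the helper and its state
// layout are local to this sketch, and memcpy is assumed available.
static inline double SketchRandom(uint32_t* state) {
  state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16);
  state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16);
  uint32_t random_bits = (state[0] << 14) + (state[1] & 0x3FFFF);
  // 0x4130000000000000 is 1.0 x 2^20 as a double with an all-zero
  // significand, so or-ing in the random bits yields
  // 1.(20 zeros)(32 random bits) x 2^20.
  uint64_t bits = V8_UINT64_C(0x4130000000000000) | random_bits;
  double result;
  memcpy(&result, &bits, sizeof(result));
  return result - 1048576.0;  // Subtract 1.0 x 2^20 to leave 0.(random bits).
}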


void LCodeGen::DoDeferredRandom(LRandom* instr) {
  __ PrepareCallCFunction(1);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  // Return value is in rax.
}


void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathTan:
      DoMathTan(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
  ASSERT(ToRegister(instr->function()).is(rdi));
  ASSERT(instr->HasPointerMap());
  ASSERT(instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
  ParameterCount count(instr->arity());
  __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->key()).is(rcx));
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ Move(rcx, instr->name());
  CallCode(ic, mode, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->function()).is(rdi));
  ASSERT(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ Move(rcx, instr->name());
  CallCode(ic, mode, instr);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}


void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
  ASSERT(ToRegister(instr->result()).is(rax));

  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
  __ Set(rax, instr->arity());
  CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}


void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}


void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  int offset = instr->offset();

  if (!instr->transition().is_null()) {
    __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
  }

  // Do the store.
  HType type = instr->hydrogen()->value()->type();
  SmiCheck check_needed =
      type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
  if (instr->is_in_object()) {
    __ movq(FieldOperand(object, offset), value);
    if (instr->hydrogen()->NeedsWriteBarrier()) {
      Register temp = ToRegister(instr->TempAt(0));
      // Update the write barrier for the object for in-object properties.
      __ RecordWriteField(object,
                          offset,
                          value,
                          temp,
                          kSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          check_needed);
    }
  } else {
    Register temp = ToRegister(instr->TempAt(0));
    __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset));
    __ movq(FieldOperand(temp, offset), value);
    if (instr->hydrogen()->NeedsWriteBarrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWriteField(temp,
                          offset,
                          value,
                          object,
                          kSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          check_needed);
    }
  }
}


void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rdx));
  ASSERT(ToRegister(instr->value()).is(rax));

  __ Move(rcx, instr->hydrogen()->name());
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  Operand operand(BuildFastArrayOperand(instr->external_pointer(),
                                        instr->key(), elements_kind, 0));
  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
    XMMRegister value(ToDoubleRegister(instr->value()));
    __ cvtsd2ss(value, value);
    __ movss(operand, value);
  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    __ movsd(operand, ToDoubleRegister(instr->value()));
  } else {
    Register value(ToRegister(instr->value()));
    switch (elements_kind) {
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_BYTE_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ movb(operand, value);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ movw(operand, value);
        break;
      case EXTERNAL_INT_ELEMENTS:
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ movl(operand, value);
        break;
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_SMI_ONLY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  if (instr->length()->IsRegister()) {
    Register reg = ToRegister(instr->length());
    if (FLAG_debug_code) {
      __ AbortIfNotZeroExtended(reg);
    }
    if (instr->index()->IsConstantOperand()) {
      __ cmpq(reg,
              Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
    } else {
      Register reg2 = ToRegister(instr->index());
      if (FLAG_debug_code) {
        __ AbortIfNotZeroExtended(reg2);
      }
      __ cmpq(reg, reg2);
    }
  } else {
    if (instr->index()->IsConstantOperand()) {
      __ cmpq(ToOperand(instr->length()),
              Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
    } else {
      __ cmpq(ToOperand(instr->length()), ToRegister(instr->index()));
    }
  }
  DeoptimizeIf(below_equal, instr->environment());
}
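

// Sketch (illustration only) of the deoptimization condition above: with
// both operands zero-extended to 64 bits (which AbortIfNotZeroExtended
// checks in debug code), the single unsigned comparison deopts exactly
// when length <= index, and any index that was negative as a 32-bit value
// appears huge when treated as unsigned, so it fails the check too.
static inline bool SketchBoundsCheckFails(uint32_t length, uint32_t index) {
  return length <= index;  // below_equal on unsigned operands.
}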


void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ movq(FieldOperand(elements, offset), value);
  } else {
    __ movq(FieldOperand(elements,
                         key,
                         times_pointer_size,
                         FixedArray::kHeaderSize),
            value);
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    HType type = instr->hydrogen()->value()->type();
    SmiCheck check_needed =
        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    // Compute address of modified element and store it into key register.
    __ lea(key, FieldOperand(elements,
                             key,
                             times_pointer_size,
                             FixedArray::kHeaderSize));
    __ RecordWrite(elements,
                   key,
                   value,
                   kSaveFPRegs,
                   EMIT_REMEMBERED_SET,
                   check_needed);
  }
}


void LCodeGen::DoStoreKeyedFastDoubleElement(
    LStoreKeyedFastDoubleElement* instr) {
  XMMRegister value = ToDoubleRegister(instr->value());
  Label have_value;

  __ ucomisd(value, value);
  __ j(parity_odd, &have_value);  // Jump if the value is not NaN.

  // The value is NaN; replace it with the canonical NaN bit pattern.
  __ Set(kScratchRegister, BitCast<uint64_t>(
      FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  __ movq(value, kScratchRegister);

  __ bind(&have_value);
  Operand double_store_operand = BuildFastArrayOperand(
      instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
      FixedDoubleArray::kHeaderSize - kHeapObjectTag);
  __ movsd(double_store_operand, value);
}
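

// Illustration-only sketch of the canonicalization above: a NaN is the
// only value that compares unordered with itself (ucomisd then sets the
// parity flag), and every NaN stored into a FixedDoubleArray is replaced
// by one canonical bit pattern so that arbitrary NaN payloads never end up
// in the backing store. The helper name is local to this sketch.
static inline double SketchCanonicalizeNaN(double value, double canonical_nan) {
  return value != value ? canonical_nan : value;  // NaN != NaN is true.
}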


void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rdx));
  ASSERT(ToRegister(instr->key()).is(rcx));
  ASSERT(ToRegister(instr->value()).is(rax));

  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
      : isolate()->builtins()->KeyedStoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
  Register object_reg = ToRegister(instr->object());
  Register new_map_reg = ToRegister(instr->new_map_reg());

  Handle<Map> from_map = instr->original_map();
  Handle<Map> to_map = instr->transitioned_map();
  ElementsKind from_kind = from_map->elements_kind();
  ElementsKind to_kind = to_map->elements_kind();

  Label not_applicable;
  __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
  __ j(not_equal, &not_applicable);
  __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
  if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
    __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
    // Write barrier.
    ASSERT_NE(instr->temp_reg(), NULL);
    __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
                        ToRegister(instr->temp_reg()), kDontSaveFPRegs);
  } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
      to_kind == FAST_DOUBLE_ELEMENTS) {
    Register fixed_object_reg = ToRegister(instr->temp_reg());
    ASSERT(fixed_object_reg.is(rdx));
    ASSERT(new_map_reg.is(rbx));
    __ movq(fixed_object_reg, object_reg);
    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
             RelocInfo::CODE_TARGET, instr);
  } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
    Register fixed_object_reg = ToRegister(instr->temp_reg());
    ASSERT(fixed_object_reg.is(rdx));
    ASSERT(new_map_reg.is(rbx));
    __ movq(fixed_object_reg, object_reg);
    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
             RelocInfo::CODE_TARGET, instr);
  } else {
    UNREACHABLE();
  }
  __ bind(&not_applicable);
}


void LCodeGen::DoStringAdd(LStringAdd* instr) {
  EmitPushTaggedOperand(instr->left());
  EmitPushTaggedOperand(instr->right());
  StringAddStub stub(NO_STRING_CHECK_IN_STUB);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStringCharCodeAt* instr_;
  };

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  StringCharLoadGenerator::Generate(masm(),
                                    ToRegister(instr->string()),
                                    ToRegister(instr->index()),
                                    ToRegister(instr->result()),
                                    deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, 0);

  PushSafepointRegistersScope scope(this);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ Push(Smi::FromInt(const_index));
  } else {
    Register index = ToRegister(instr->index());
    __ Integer32ToSmi(index, index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(rax);
  }
  __ SmiToInteger32(rax, rax);
  __ StoreToSafepointRegisterSlot(result, rax);
}


void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  __ cmpl(char_code, Immediate(String::kMaxAsciiCharCode));
  __ j(above, deferred->entry());
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ movq(result, FieldOperand(result,
                               char_code, times_pointer_size,
                               FixedArray::kHeaderSize));
  __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
  __ j(equal, deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, 0);

  PushSafepointRegistersScope scope(this);
  __ Integer32ToSmi(char_code, char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(result, rax);
}


void LCodeGen::DoStringLength(LStringLength* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  __ movq(result, FieldOperand(string, String::kLengthOffset));
}


void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() || input->IsStackSlot());
  LOperand* output = instr->result();
  ASSERT(output->IsDoubleRegister());
  if (input->IsRegister()) {
    __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
  } else {
    __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
  }
}


void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  __ Integer32ToSmi(reg, reg);
}


void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LNumberTagD* instr_;
  };

  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register reg = ToRegister(instr->result());
  Register tmp = ToRegister(instr->TempAt(0));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
}


void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ Move(reg, Smi::FromInt(0));

  {
    PushSafepointRegistersScope scope(this);
    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Ensure that value in rax survives popping registers.
    __ movq(kScratchRegister, rax);
  }
  __ movq(reg, kScratchRegister);
}


void LCodeGen::DoSmiTag(LSmiTag* instr) {
  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Register input = ToRegister(instr->InputAt(0));
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
  __ Integer32ToSmi(input, input);
}


void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Register input = ToRegister(instr->InputAt(0));
  if (instr->needs_check()) {
    Condition is_smi = __ CheckSmi(input);
    DeoptimizeIf(NegateCondition(is_smi), instr->environment());
  }
  __ SmiToInteger32(input, input);
}
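

// Sketch (illustration only) of the x64 smi representation manipulated by
// Integer32ToSmi and SmiToInteger32 above: the 32-bit payload lives in the
// upper half of the 64-bit word, so tagging is a plain left shift (the low
// 32 bits, including the tag, become zero) and untagging is an arithmetic
// right shift that restores the sign. Helper names are local to the sketch.
static inline int64_t SketchSmiTag(int32_t value) {
  return static_cast<int64_t>(value) << 32;  // Payload up, tag bits zero.
}
static inline int32_t SketchSmiUntag(int64_t smi) {
  return static_cast<int32_t>(smi >> 32);  // Arithmetic shift keeps the sign.
}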


void LCodeGen::EmitNumberUntagD(Register input_reg,
                                XMMRegister result_reg,
                                bool deoptimize_on_undefined,
                                bool deoptimize_on_minus_zero,
                                LEnvironment* env) {
  Label load_smi, done;

  // Smi check.
  __ JumpIfSmi(input_reg, &load_smi, Label::kNear);

  // Heap number map check.
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  if (deoptimize_on_undefined) {
    DeoptimizeIf(not_equal, env);
  } else {
    Label heap_number;
    __ j(equal, &heap_number, Label::kNear);

    __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
    DeoptimizeIf(not_equal, env);

    // Convert undefined to NaN. Compute NaN as 0/0.
    __ xorps(result_reg, result_reg);
    __ divsd(result_reg, result_reg);
    __ jmp(&done, Label::kNear);

    __ bind(&heap_number);
  }
  // Heap number to XMM conversion.
  __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
  if (deoptimize_on_minus_zero) {
    XMMRegister xmm_scratch = xmm0;
    __ xorps(xmm_scratch, xmm_scratch);
    __ ucomisd(xmm_scratch, result_reg);
    __ j(not_equal, &done, Label::kNear);
    __ movmskpd(kScratchRegister, result_reg);
    __ testq(kScratchRegister, Immediate(1));
    DeoptimizeIf(not_zero, env);
  }
  __ jmp(&done, Label::kNear);

  // Smi to XMM conversion.
  __ bind(&load_smi);
  __ SmiToInteger32(kScratchRegister, input_reg);
  __ cvtlsi2sd(result_reg, kScratchRegister);
  __ bind(&done);
}
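

// Illustration-only note on the undefined-to-NaN conversion above: xorps
// zeroes the register and divsd then evaluates 0.0 / 0.0, which under
// IEEE-754 produces a quiet NaN without loading a constant from memory.
// The same idea in C++ (the helper is hypothetical):
static inline double SketchZeroOverZeroNaN() {
  double zero = 0.0;
  return zero / zero;  // 0/0 is NaN under IEEE-754 arithmetic.
}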


void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Label done, heap_number;
  Register input_reg = ToRegister(instr->InputAt(0));

  // Heap number map check.
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);

  if (instr->truncating()) {
    __ j(equal, &heap_number, Label::kNear);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
    DeoptimizeIf(not_equal, instr->environment());
    __ Set(input_reg, 0);
    __ jmp(&done, Label::kNear);

    __ bind(&heap_number);

    __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2siq(input_reg, xmm0);
    __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
    __ cmpq(input_reg, kScratchRegister);
    DeoptimizeIf(equal, instr->environment());
  } else {
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(not_equal, instr->environment());

    XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
    __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2si(input_reg, xmm0);
    __ cvtlsi2sd(xmm_temp, input_reg);
    __ ucomisd(xmm0, xmm_temp);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ testl(input_reg, input_reg);
      __ j(not_zero, &done);
      __ movmskpd(input_reg, xmm0);
      __ andl(input_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr->environment());
    }
  }
  __ bind(&done);
}


void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  class DeferredTaggedToI: public LDeferredCode {
   public:
    DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LTaggedToI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);
  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
  __ JumpIfNotSmi(input_reg, deferred->entry());
  __ SmiToInteger32(input_reg, input_reg);
  __ bind(deferred->exit());
}


void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  XMMRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg,
                   instr->hydrogen()->deoptimize_on_undefined(),
                   instr->hydrogen()->deoptimize_on_minus_zero(),
                   instr->environment());
}


void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsDoubleRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsRegister());

  XMMRegister input_reg = ToDoubleRegister(input);
  Register result_reg = ToRegister(result);

  if (instr->truncating()) {
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    __ cvttsd2siq(result_reg, input_reg);
    __ movq(kScratchRegister, V8_INT64_C(0x8000000000000000), RelocInfo::NONE);
    __ cmpq(result_reg, kScratchRegister);
    DeoptimizeIf(equal, instr->environment());
  } else {
    __ cvttsd2si(result_reg, input_reg);
    __ cvtlsi2sd(xmm0, result_reg);
    __ ucomisd(xmm0, input_reg);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      Label done;
      // The integer converted back is equal to the original. We
      // only have to test if we got -0 as an input.
      __ testl(result_reg, result_reg);
      __ j(not_zero, &done, Label::kNear);
      __ movmskpd(result_reg, input_reg);
      // Bit 0 contains the sign of the double in input_reg.
      // If input was positive, we are ok and return 0, otherwise
      // deoptimize.
      __ andl(result_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr->environment());
      __ bind(&done);
    }
  }
}
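

// Sketch (illustration only) of the two conversion modes above: cvttsd2siq
// reports NaN or an out-of-range input by producing the sentinel
// 0x8000000000000000, which the truncating path compares against, while
// the non-truncating path converts back to double and deopts unless the
// round trip is exact (with a separate sign-bit test for a -0 input). The
// helper assumes an input for which the int32 cast is well defined.
static inline bool SketchExactDoubleToInt32(double input, int32_t* out) {
  if (input != input) return false;  // NaN never converts exactly.
  int32_t converted = static_cast<int32_t>(input);  // Truncates like cvttsd2si.
  if (static_cast<double>(converted) != input) return false;  // Lossy.
  *out = converted;
  return true;  // A -0.0 input still needs the separate sign-bit check.
}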


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  Condition cc = masm()->CheckSmi(ToRegister(input));
  DeoptimizeIf(NegateCondition(cc), instr->environment());
}


void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  Condition cc = masm()->CheckSmi(ToRegister(input));
  DeoptimizeIf(cc, instr->environment());
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));

  __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
            Immediate(static_cast<int8_t>(first)));

    // If there is only one type in the interval, check for equality.
    if (first == last) {
      DeoptimizeIf(not_equal, instr->environment());
    } else {
      DeoptimizeIf(below, instr->environment());
      // Omit the upper-bound check when the interval ends at the last
      // possible type.
      if (last != LAST_TYPE) {
        __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
                Immediate(static_cast<int8_t>(last)));
        DeoptimizeIf(above, instr->environment());
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (IsPowerOf2(mask)) {
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
               Immediate(mask));
      DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment());
    } else {
      __ movzxbl(kScratchRegister,
                 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
      __ andb(kScratchRegister, Immediate(mask));
      __ cmpb(kScratchRegister, Immediate(tag));
      DeoptimizeIf(not_equal, instr->environment());
    }
  }
}
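

// Illustration-only sketch of the mask-and-tag fast case above: when the
// mask is a power of two the tag can only be zero or the mask itself, so
// a single testb settles the check; otherwise the instance type byte is
// masked and compared against the tag explicitly. Both paths compute:
static inline bool SketchInstanceTypeMatches(uint8_t type, uint8_t mask,
                                             uint8_t tag) {
  return (type & mask) == tag;
}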


void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  Register reg = ToRegister(instr->value());
  Handle<JSFunction> target = instr->hydrogen()->target();
  if (isolate()->heap()->InNewSpace(*target)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(target);
    __ movq(kScratchRegister, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    __ cmpq(reg, Operand(kScratchRegister, 0));
  } else {
    __ Cmp(reg, target);
  }
  DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::DoCheckMapCommon(Register reg,
                                Handle<Map> map,
                                CompareMapMode mode,
                                LEnvironment* env) {
  Label success;
  __ CompareMap(reg, map, &success, mode);
  DeoptimizeIf(not_equal, env);
  __ bind(&success);
}


void LCodeGen::DoCheckMap(LCheckMap* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  Handle<Map> map = instr->hydrogen()->map();
  DoCheckMapCommon(reg, map, instr->hydrogen()->mode(), instr->environment());
}


void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  Register temp_reg = ToRegister(instr->TempAt(0));
  __ ClampDoubleToUint8(value_reg, xmm0, result_reg, temp_reg);
}


void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  ASSERT(instr->unclamped()->Equals(instr->result()));
  Register value_reg = ToRegister(instr->result());
  __ ClampUint8(value_reg);
}


void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  ASSERT(instr->unclamped()->Equals(instr->result()));
  Register input_reg = ToRegister(instr->unclamped());
  Register temp_reg = ToRegister(instr->TempAt(0));
  XMMRegister temp_xmm_reg = ToDoubleRegister(instr->TempAt(1));
  Label is_smi, done, heap_number;

  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number.
  __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  __ j(equal, &heap_number, Label::kNear);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ Cmp(input_reg, factory()->undefined_value());
  DeoptimizeIf(not_equal, instr->environment());
  __ movq(input_reg, Immediate(0));
  __ jmp(&done, Label::kNear);

  // Heap number case.
  __ bind(&heap_number);
  __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg, temp_reg);
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&is_smi);
  __ SmiToInteger32(input_reg, input_reg);
  __ ClampUint8(input_reg);

  __ bind(&done);
}
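

// Sketch (illustration only) of the clamping performed by ClampUint8 above:
// integer values are pinned to the [0, 255] range used by pixel arrays
// (ClampDoubleToUint8 additionally rounds a double input to an integer
// first). The helper covers only the integer case and is local to this
// sketch.
static inline int32_t SketchClampToUint8(int32_t value) {
  if (value < 0) return 0;
  if (value > 255) return 255;
  return value;
}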
   4014 
   4015 
   4016 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
   4017   Register reg = ToRegister(instr->TempAt(0));
   4018 
   4019   Handle<JSObject> holder = instr->holder();
   4020   Handle<JSObject> current_prototype = instr->prototype();
   4021 
   4022   // Load prototype object.
   4023   __ LoadHeapObject(reg, current_prototype);
   4024 
   4025   // Check prototype maps up to the holder.
   4026   while (!current_prototype.is_identical_to(holder)) {
   4027     DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
   4028                      ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
   4029     current_prototype =
   4030         Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
   4031     // Load next prototype object.
   4032     __ LoadHeapObject(reg, current_prototype);
   4033   }
   4034 
   4035   // Check the holder map.
   4036     DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
   4037                      ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
   4038 }
   4039 
   4040 
   4041 void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
   4042   class DeferredAllocateObject: public LDeferredCode {
   4043    public:
   4044     DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
   4045         : LDeferredCode(codegen), instr_(instr) { }
   4046     virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
   4047     virtual LInstruction* instr() { return instr_; }
   4048    private:
   4049     LAllocateObject* instr_;
   4050   };
   4051 
   4052   DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
   4053 
   4054   Register result = ToRegister(instr->result());
   4055   Register scratch = ToRegister(instr->TempAt(0));
   4056   Handle<JSFunction> constructor = instr->hydrogen()->constructor();
   4057   Handle<Map> initial_map(constructor->initial_map());
   4058   int instance_size = initial_map->instance_size();
   4059   ASSERT(initial_map->pre_allocated_property_fields() +
   4060          initial_map->unused_property_fields() -
   4061          initial_map->inobject_properties() == 0);
   4062 
   4063   // Allocate memory for the object.  The initial map might change when
   4064   // the constructor's prototype changes, but instance size and property
   4065   // counts remain unchanged (if slack tracking finished).
   4066   ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
   4067   __ AllocateInNewSpace(instance_size,
   4068                         result,
   4069                         no_reg,
   4070                         scratch,
   4071                         deferred->entry(),
   4072                         TAG_OBJECT);
   4073 
   4074   // Load the initial map.
   4075   Register map = scratch;
   4076   __ LoadHeapObject(scratch, constructor);
   4077   __ movq(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
   4078 
   4079   if (FLAG_debug_code) {
   4080     __ AbortIfSmi(map);
   4081     __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
   4082             Immediate(instance_size >> kPointerSizeLog2));
   4083     __ Assert(equal, "Unexpected instance size");
   4084     __ cmpb(FieldOperand(map, Map::kPreAllocatedPropertyFieldsOffset),
   4085             Immediate(initial_map->pre_allocated_property_fields()));
   4086     __ Assert(equal, "Unexpected pre-allocated property fields count");
   4087     __ cmpb(FieldOperand(map, Map::kUnusedPropertyFieldsOffset),
   4088             Immediate(initial_map->unused_property_fields()));
   4089     __ Assert(equal, "Unexpected unused property fields count");
   4090     __ cmpb(FieldOperand(map, Map::kInObjectPropertiesOffset),
   4091             Immediate(initial_map->inobject_properties()));
   4092     __ Assert(equal, "Unexpected in-object property fields count");
   4093   }
   4094 
   4095   // Initialize map and fields of the newly allocated object.
   4096   ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
   4097   __ movq(FieldOperand(result, JSObject::kMapOffset), map);
   4098   __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
   4099   __ movq(FieldOperand(result, JSObject::kElementsOffset), scratch);
   4100   __ movq(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
   4101   if (initial_map->inobject_properties() != 0) {
   4102     __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
   4103     for (int i = 0; i < initial_map->inobject_properties(); i++) {
   4104       int property_offset = JSObject::kHeaderSize + i * kPointerSize;
   4105       __ movq(FieldOperand(result, property_offset), scratch);
   4106     }
   4107   }
   4108 
   4109   __ bind(deferred->exit());
   4110 }
   4111 
   4112 
   4113 void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
   4114   Register result = ToRegister(instr->result());
   4115   Handle<JSFunction> constructor = instr->hydrogen()->constructor();
   4116 
   4117   // TODO(3095996): Get rid of this. For now, we need to make the
   4118   // result register contain a valid pointer because it is already
   4119   // contained in the register pointer map.
   4120   __ Set(result, 0);
   4121 
   4122   PushSafepointRegistersScope scope(this);
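  // Runtime::kNewObject takes the constructor function as its only argument
  // and returns the freshly allocated object in rax.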
   4123   __ PushHeapObject(constructor);
   4124   CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
   4125   __ StoreToSafepointRegisterSlot(result, rax);
   4126 }
   4127 
   4128 
   4129 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   4130   Heap* heap = isolate()->heap();
   4131   ElementsKind boilerplate_elements_kind =
   4132       instr->hydrogen()->boilerplate_elements_kind();
   4133 
  // Deopt if the array literal boilerplate ElementsKind is of a type
  // different from the expected one. The check isn't necessary if the
  // boilerplate has already been converted to FAST_ELEMENTS.
   4137   if (boilerplate_elements_kind != FAST_ELEMENTS) {
   4138     __ LoadHeapObject(rax, instr->hydrogen()->boilerplate_object());
   4139     __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
   4140     // Load the map's "bit field 2".
   4141     __ movb(rbx, FieldOperand(rbx, Map::kBitField2Offset));
   4142     // Retrieve elements_kind from bit field 2.
   4143     __ and_(rbx, Immediate(Map::kElementsKindMask));
   4144     __ cmpb(rbx, Immediate(boilerplate_elements_kind <<
   4145                            Map::kElementsKindShift));
   4146     DeoptimizeIf(not_equal, instr->environment());
   4147   }
   4148 
   4149   // Set up the parameters to the stub/runtime call.
   4150   __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   4151   __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
   4152   __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  // The boilerplate already exists; constant elements are never accessed.
  // Pass an empty fixed array instead.
   4155   __ Push(Handle<FixedArray>(heap->empty_fixed_array()));
   4156 
   4157   // Pick the right runtime function or stub to call.
   4158   int length = instr->hydrogen()->length();
   4159   if (instr->hydrogen()->IsCopyOnWrite()) {
   4160     ASSERT(instr->hydrogen()->depth() == 1);
   4161     FastCloneShallowArrayStub::Mode mode =
   4162         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
   4163     FastCloneShallowArrayStub stub(mode, length);
   4164     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   4165   } else if (instr->hydrogen()->depth() > 1) {
   4166     CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
   4167   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
   4168     CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
   4169   } else {
   4170     FastCloneShallowArrayStub::Mode mode =
   4171         boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
   4172             ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
   4173             : FastCloneShallowArrayStub::CLONE_ELEMENTS;
   4174     FastCloneShallowArrayStub stub(mode, length);
   4175     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   4176   }
   4177 }
   4178 
   4179 
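// Copies the given boilerplate object, together with any nested boilerplate
// objects and elements backing stores it references, into the block of
// memory starting at |result|.  |*offset| is the running allocation cursor
// within that block; it is advanced past everything this call copies, so
// that recursive copies land directly behind their parent object.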
   4180 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
   4181                             Register result,
   4182                             Register source,
   4183                             int* offset) {
   4184   ASSERT(!source.is(rcx));
   4185   ASSERT(!result.is(rcx));
   4186 
   4187   // Only elements backing stores for non-COW arrays need to be copied.
   4188   Handle<FixedArrayBase> elements(object->elements());
   4189   bool has_elements = elements->length() > 0 &&
   4190       elements->map() != isolate()->heap()->fixed_cow_array_map();
   4191 
   4192   // Increase the offset so that subsequent objects end up right after
   4193   // this object and its backing store.
   4194   int object_offset = *offset;
   4195   int object_size = object->map()->instance_size();
   4196   int elements_offset = *offset + object_size;
   4197   int elements_size = has_elements ? elements->Size() : 0;
   4198   *offset += object_size + elements_size;
   4199 
   4200   // Copy object header.
   4201   ASSERT(object->properties()->length() == 0);
   4202   int inobject_properties = object->map()->inobject_properties();
   4203   int header_size = object_size - inobject_properties * kPointerSize;
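  // Copy the header word by word, fixing up the elements pointer to refer
  // to the copy that is placed right behind this object.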
   4204   for (int i = 0; i < header_size; i += kPointerSize) {
   4205     if (has_elements && i == JSObject::kElementsOffset) {
   4206       __ lea(rcx, Operand(result, elements_offset));
   4207     } else {
   4208       __ movq(rcx, FieldOperand(source, i));
   4209     }
   4210     __ movq(FieldOperand(result, object_offset + i), rcx);
   4211   }
   4212 
   4213   // Copy in-object properties.
   4214   for (int i = 0; i < inobject_properties; i++) {
   4215     int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
   4216     Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
   4217     if (value->IsJSObject()) {
   4218       Handle<JSObject> value_object = Handle<JSObject>::cast(value);
   4219       __ lea(rcx, Operand(result, *offset));
   4220       __ movq(FieldOperand(result, total_offset), rcx);
   4221       __ LoadHeapObject(source, value_object);
   4222       EmitDeepCopy(value_object, result, source, offset);
   4223     } else if (value->IsHeapObject()) {
   4224       __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
   4225       __ movq(FieldOperand(result, total_offset), rcx);
   4226     } else {
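      // The value must be a smi, which can be stored as a raw immediate
      // without relocation information.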
   4227       __ movq(rcx, value, RelocInfo::NONE);
   4228       __ movq(FieldOperand(result, total_offset), rcx);
   4229     }
   4230   }
   4231 
   4232   if (has_elements) {
   4233     // Copy elements backing store header.
   4234     __ LoadHeapObject(source, elements);
   4235     for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
   4236       __ movq(rcx, FieldOperand(source, i));
   4237       __ movq(FieldOperand(result, elements_offset + i), rcx);
   4238     }
   4239 
   4240     // Copy elements backing store content.
   4241     int elements_length = elements->length();
   4242     if (elements->IsFixedDoubleArray()) {
   4243       Handle<FixedDoubleArray> double_array =
   4244           Handle<FixedDoubleArray>::cast(elements);
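      // Copy the raw 64-bit representation of each element so that hole
      // NaNs are preserved bit-for-bit.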
   4245       for (int i = 0; i < elements_length; i++) {
   4246         int64_t value = double_array->get_representation(i);
   4247         int total_offset =
   4248             elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
   4249         __ movq(rcx, value, RelocInfo::NONE);
   4250         __ movq(FieldOperand(result, total_offset), rcx);
   4251       }
   4252     } else if (elements->IsFixedArray()) {
   4253       for (int i = 0; i < elements_length; i++) {
   4254         int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
   4255         Handle<Object> value = JSObject::GetElement(object, i);
   4256         if (value->IsJSObject()) {
   4257           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
   4258           __ lea(rcx, Operand(result, *offset));
   4259           __ movq(FieldOperand(result, total_offset), rcx);
   4260           __ LoadHeapObject(source, value_object);
   4261           EmitDeepCopy(value_object, result, source, offset);
   4262         } else if (value->IsHeapObject()) {
   4263           __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
   4264           __ movq(FieldOperand(result, total_offset), rcx);
   4265         } else {
   4266           __ movq(rcx, value, RelocInfo::NONE);
   4267           __ movq(FieldOperand(result, total_offset), rcx);
   4268         }
   4269       }
   4270     } else {
   4271       UNREACHABLE();
   4272     }
   4273   }
   4274 }
   4275 
   4276 
   4277 void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
   4278   int size = instr->hydrogen()->total_size();
   4279 
   4280   // Allocate all objects that are part of the literal in one big
   4281   // allocation. This avoids multiple limit checks.
   4282   Label allocated, runtime_allocate;
   4283   __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
   4284   __ jmp(&allocated);
   4285 
   4286   __ bind(&runtime_allocate);
   4287   __ Push(Smi::FromInt(size));
   4288   CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
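  // The runtime call leaves the allocated (tagged) object in rax.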
   4289 
   4290   __ bind(&allocated);
   4291   int offset = 0;
   4292   __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
   4293   EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset);
   4294   ASSERT_EQ(size, offset);
   4295 }
   4296 
   4297 
   4298 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
   4299   Handle<FixedArray> literals(instr->environment()->closure()->literals());
   4300   Handle<FixedArray> constant_properties =
   4301       instr->hydrogen()->constant_properties();
   4302 
   4303   // Set up the parameters to the stub/runtime call.
   4304   __ PushHeapObject(literals);
   4305   __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
   4306   __ Push(constant_properties);
   4307   int flags = instr->hydrogen()->fast_elements()
   4308       ? ObjectLiteral::kFastElements
   4309       : ObjectLiteral::kNoFlags;
   4310   flags |= instr->hydrogen()->has_function()
   4311       ? ObjectLiteral::kHasFunction
   4312       : ObjectLiteral::kNoFlags;
   4313   __ Push(Smi::FromInt(flags));
   4314 
   4315   // Pick the right runtime function or stub to call.
   4316   int properties_count = constant_properties->length() / 2;
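  // constant_properties holds alternating keys and values, so the number of
  // properties is half its length.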
   4317   if (instr->hydrogen()->depth() > 1) {
   4318     CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
   4319   } else if (flags != ObjectLiteral::kFastElements ||
   4320       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
   4321     CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
   4322   } else {
   4323     FastCloneShallowObjectStub stub(properties_count);
   4324     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   4325   }
   4326 }
   4327 
   4328 
   4329 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
   4330   ASSERT(ToRegister(instr->InputAt(0)).is(rax));
   4331   __ push(rax);
   4332   CallRuntime(Runtime::kToFastProperties, 1, instr);
   4333 }
   4334 
   4335 
   4336 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   4337   Label materialized;
   4338   // Registers will be used as follows:
   4339   // rdi = JS function.
   4340   // rcx = literals array.
   4341   // rbx = regexp literal.
   4342   // rax = regexp literal clone.
   4343   __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   4344   __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
   4345   int literal_offset = FixedArray::kHeaderSize +
   4346       instr->hydrogen()->literal_index() * kPointerSize;
   4347   __ movq(rbx, FieldOperand(rcx, literal_offset));
   4348   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
   4349   __ j(not_equal, &materialized, Label::kNear);
   4350 
  // Create the regexp literal via the runtime function.
  // The result will be in rax.
   4353   __ push(rcx);
   4354   __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
   4355   __ Push(instr->hydrogen()->pattern());
   4356   __ Push(instr->hydrogen()->flags());
   4357   CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
   4358   __ movq(rbx, rax);
   4359 
   4360   __ bind(&materialized);
   4361   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
   4362   Label allocated, runtime_allocate;
   4363   __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
   4364   __ jmp(&allocated);
   4365 
   4366   __ bind(&runtime_allocate);
   4367   __ push(rbx);
   4368   __ Push(Smi::FromInt(size));
   4369   CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
   4370   __ pop(rbx);
   4371 
   4372   __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (The copy loop is unrolled once for better throughput.)
   4375   for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
   4376     __ movq(rdx, FieldOperand(rbx, i));
   4377     __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
   4378     __ movq(FieldOperand(rax, i), rdx);
   4379     __ movq(FieldOperand(rax, i + kPointerSize), rcx);
   4380   }
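  // Copy the trailing word if the object size is an odd number of pointers.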
   4381   if ((size % (2 * kPointerSize)) != 0) {
   4382     __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
   4383     __ movq(FieldOperand(rax, size - kPointerSize), rdx);
   4384   }
   4385 }
   4386 
   4387 
   4388 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   4389   // Use the fast case closure allocation code that allocates in new
   4390   // space for nested functions that don't need literals cloning.
   4391   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   4392   bool pretenure = instr->hydrogen()->pretenure();
   4393   if (!pretenure && shared_info->num_literals() == 0) {
   4394     FastNewClosureStub stub(shared_info->language_mode());
   4395     __ Push(shared_info);
   4396     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   4397   } else {
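    // Slow case: call into the runtime, passing the context, the shared
    // function info, and the pretenure flag.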
   4398     __ push(rsi);
   4399     __ Push(shared_info);
   4400     __ PushRoot(pretenure ?
   4401                 Heap::kTrueValueRootIndex :
   4402                 Heap::kFalseValueRootIndex);
   4403     CallRuntime(Runtime::kNewClosure, 3, instr);
   4404   }
   4405 }
   4406 
   4407 
   4408 void LCodeGen::DoTypeof(LTypeof* instr) {
   4409   LOperand* input = instr->InputAt(0);
   4410   EmitPushTaggedOperand(input);
   4411   CallRuntime(Runtime::kTypeof, 1, instr);
   4412 }
   4413 
   4414 
   4415 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
   4416   ASSERT(!operand->IsDoubleRegister());
   4417   if (operand->IsConstantOperand()) {
   4418     Handle<Object> object = ToHandle(LConstantOperand::cast(operand));
   4419     if (object->IsSmi()) {
   4420       __ Push(Handle<Smi>::cast(object));
   4421     } else {
   4422       __ PushHeapObject(Handle<HeapObject>::cast(object));
   4423     }
   4424   } else if (operand->IsRegister()) {
   4425     __ push(ToRegister(operand));
   4426   } else {
   4427     __ push(ToOperand(operand));
   4428   }
   4429 }
   4430 
   4431 
   4432 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
   4433   Register input = ToRegister(instr->InputAt(0));
   4434   int true_block = chunk_->LookupDestination(instr->true_block_id());
   4435   int false_block = chunk_->LookupDestination(instr->false_block_id());
   4436   Label* true_label = chunk_->GetAssemblyLabel(true_block);
   4437   Label* false_label = chunk_->GetAssemblyLabel(false_block);
   4438 
   4439   Condition final_branch_condition =
   4440       EmitTypeofIs(true_label, false_label, input, instr->type_literal());
   4441   if (final_branch_condition != no_condition) {
   4442     EmitBranch(true_block, false_block, final_branch_condition);
   4443   }
   4444 }
   4445 
   4446 
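// Emits the comparison for a typeof check against |type_name| and returns
// the condition on which the caller should branch to the true block.
// Returns no_condition when the type name is unknown; in that case control
// has already been transferred to the false label by an unconditional jump.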
   4447 Condition LCodeGen::EmitTypeofIs(Label* true_label,
   4448                                  Label* false_label,
   4449                                  Register input,
   4450                                  Handle<String> type_name) {
   4451   Condition final_branch_condition = no_condition;
   4452   if (type_name->Equals(heap()->number_symbol())) {
   4453     __ JumpIfSmi(input, true_label);
   4454     __ CompareRoot(FieldOperand(input, HeapObject::kMapOffset),
   4455                    Heap::kHeapNumberMapRootIndex);
   4456 
   4457     final_branch_condition = equal;
   4458 
   4459   } else if (type_name->Equals(heap()->string_symbol())) {
   4460     __ JumpIfSmi(input, false_label);
   4461     __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
   4462     __ j(above_equal, false_label);
   4463     __ testb(FieldOperand(input, Map::kBitFieldOffset),
   4464              Immediate(1 << Map::kIsUndetectable));
   4465     final_branch_condition = zero;
   4466 
   4467   } else if (type_name->Equals(heap()->boolean_symbol())) {
   4468     __ CompareRoot(input, Heap::kTrueValueRootIndex);
   4469     __ j(equal, true_label);
   4470     __ CompareRoot(input, Heap::kFalseValueRootIndex);
   4471     final_branch_condition = equal;
   4472 
   4473   } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
   4474     __ CompareRoot(input, Heap::kNullValueRootIndex);
   4475     final_branch_condition = equal;
   4476 
   4477   } else if (type_name->Equals(heap()->undefined_symbol())) {
   4478     __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
   4479     __ j(equal, true_label);
   4480     __ JumpIfSmi(input, false_label);
   4481     // Check for undetectable objects => true.
   4482     __ movq(input, FieldOperand(input, HeapObject::kMapOffset));
   4483     __ testb(FieldOperand(input, Map::kBitFieldOffset),
   4484              Immediate(1 << Map::kIsUndetectable));
   4485     final_branch_condition = not_zero;
   4486 
   4487   } else if (type_name->Equals(heap()->function_symbol())) {
   4488     STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   4489     __ JumpIfSmi(input, false_label);
   4490     __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
   4491     __ j(equal, true_label);
   4492     __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE);
   4493     final_branch_condition = equal;
   4494 
   4495   } else if (type_name->Equals(heap()->object_symbol())) {
   4496     __ JumpIfSmi(input, false_label);
   4497     if (!FLAG_harmony_typeof) {
   4498       __ CompareRoot(input, Heap::kNullValueRootIndex);
   4499       __ j(equal, true_label);
   4500     }
   4501     __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
   4502     __ j(below, false_label);
   4503     __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
   4504     __ j(above, false_label);
   4505     // Check for undetectable objects => false.
   4506     __ testb(FieldOperand(input, Map::kBitFieldOffset),
   4507              Immediate(1 << Map::kIsUndetectable));
   4508     final_branch_condition = zero;
   4509 
   4510   } else {
   4511     __ jmp(false_label);
   4512   }
   4513 
   4514   return final_branch_condition;
   4515 }
   4516 
   4517 
   4518 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
   4519   Register temp = ToRegister(instr->TempAt(0));
   4520   int true_block = chunk_->LookupDestination(instr->true_block_id());
   4521   int false_block = chunk_->LookupDestination(instr->false_block_id());
   4522 
   4523   EmitIsConstructCall(temp);
   4524   EmitBranch(true_block, false_block, equal);
   4525 }
   4526 
   4527 
   4528 void LCodeGen::EmitIsConstructCall(Register temp) {
   4529   // Get the frame pointer for the calling frame.
   4530   __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   4531 
   4532   // Skip the arguments adaptor frame if it exists.
   4533   Label check_frame_marker;
   4534   __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
   4535          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   4536   __ j(not_equal, &check_frame_marker, Label::kNear);
  __ movq(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));
   4538 
   4539   // Check the marker in the calling frame.
   4540   __ bind(&check_frame_marker);
   4541   __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
   4542          Smi::FromInt(StackFrame::CONSTRUCT));
   4543 }
   4544 
   4545 
   4546 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
   4547   // Ensure that we have enough space after the previous lazy-bailout
   4548   // instruction for patching the code here.
   4549   int current_pc = masm()->pc_offset();
   4550   if (current_pc < last_lazy_deopt_pc_ + space_needed) {
   4551     int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
   4552     __ Nop(padding_size);
   4553   }
   4554 }
   4555 
   4556 
   4557 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
   4558   EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
   4559   last_lazy_deopt_pc_ = masm()->pc_offset();
   4560   ASSERT(instr->HasEnvironment());
   4561   LEnvironment* env = instr->environment();
   4562   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
   4563   safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   4564 }
   4565 
   4566 
   4567 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
   4568   DeoptimizeIf(no_condition, instr->environment());
   4569 }
   4570 
   4571 
   4572 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
   4573   LOperand* obj = instr->object();
   4574   LOperand* key = instr->key();
   4575   EmitPushTaggedOperand(obj);
   4576   EmitPushTaggedOperand(key);
   4577   ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
   4578   LPointerMap* pointers = instr->pointer_map();
   4579   RecordPosition(pointers->position());
  // Create a safepoint generator that also ensures enough space in the
  // reloc info for patching in lazy deoptimization (since this invokes a
  // builtin).
   4583   SafepointGenerator safepoint_generator(
   4584       this, pointers, Safepoint::kLazyDeopt);
   4585   __ Push(Smi::FromInt(strict_mode_flag()));
   4586   __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
   4587 }
   4588 
   4589 
   4590 void LCodeGen::DoIn(LIn* instr) {
   4591   LOperand* obj = instr->object();
   4592   LOperand* key = instr->key();
   4593   EmitPushTaggedOperand(key);
   4594   EmitPushTaggedOperand(obj);
   4595   ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
   4596   LPointerMap* pointers = instr->pointer_map();
   4597   RecordPosition(pointers->position());
   4598   SafepointGenerator safepoint_generator(
   4599       this, pointers, Safepoint::kLazyDeopt);
   4600   __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
   4601 }
   4602 
   4603 
   4604 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
   4605   PushSafepointRegistersScope scope(this);
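  // Runtime calls expect the context in rsi; restore it from the frame.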
   4606   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   4607   __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
   4608   RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
   4609   ASSERT(instr->HasEnvironment());
   4610   LEnvironment* env = instr->environment();
   4611   safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   4612 }
   4613 
   4614 
   4615 void LCodeGen::DoStackCheck(LStackCheck* instr) {
   4616   class DeferredStackCheck: public LDeferredCode {
   4617    public:
   4618     DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
   4619         : LDeferredCode(codegen), instr_(instr) { }
   4620     virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   4621     virtual LInstruction* instr() { return instr_; }
   4622    private:
   4623     LStackCheck* instr_;
   4624   };
   4625 
   4626   ASSERT(instr->HasEnvironment());
   4627   LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack checks. We have to
  // prepare for lazy deoptimization explicitly here.
   4630   if (instr->hydrogen()->is_function_entry()) {
   4631     // Perform stack overflow check.
   4632     Label done;
   4633     __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
   4634     __ j(above_equal, &done, Label::kNear);
   4635     StackCheckStub stub;
   4636     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   4637     EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
   4638     last_lazy_deopt_pc_ = masm()->pc_offset();
   4639     __ bind(&done);
   4640     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
   4641     safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   4642   } else {
   4643     ASSERT(instr->hydrogen()->is_backwards_branch());
   4644     // Perform stack overflow check if this goto needs it before jumping.
   4645     DeferredStackCheck* deferred_stack_check =
   4646         new DeferredStackCheck(this, instr);
   4647     __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
   4648     __ j(below, deferred_stack_check->entry());
   4649     EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
   4650     last_lazy_deopt_pc_ = masm()->pc_offset();
   4651     __ bind(instr->done_label());
   4652     deferred_stack_check->SetExit(instr->done_label());
   4653     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This is done explicitly when emitting the call and the safepoint in
    // the deferred code.
   4657   }
   4658 }
   4659 
   4660 
   4661 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
   4662   // This is a pseudo-instruction that ensures that the environment here is
   4663   // properly registered for deoptimization and records the assembler's PC
   4664   // offset.
   4665   LEnvironment* environment = instr->environment();
   4666   environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
   4667                                    instr->SpilledDoubleRegisterArray());
   4668 
   4669   // If the environment were already registered, we would have no way of
   4670   // backpatching it with the spill slot operands.
   4671   ASSERT(!environment->HasBeenRegistered());
   4672   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
   4673   ASSERT(osr_pc_offset_ == -1);
   4674   osr_pc_offset_ = masm()->pc_offset();
   4675 }
   4676 
   4677 
   4678 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
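  // The receiver must be a spec object past the proxy types; deoptimize
  // for undefined, null, smis, proxies, and other non-objects.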
   4679   __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
   4680   DeoptimizeIf(equal, instr->environment());
   4681 
   4682   Register null_value = rdi;
   4683   __ LoadRoot(null_value, Heap::kNullValueRootIndex);
   4684   __ cmpq(rax, null_value);
   4685   DeoptimizeIf(equal, instr->environment());
   4686 
   4687   Condition cc = masm()->CheckSmi(rax);
   4688   DeoptimizeIf(cc, instr->environment());
   4689 
   4690   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
   4691   __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
   4692   DeoptimizeIf(below_equal, instr->environment());
   4693 
   4694   Label use_cache, call_runtime;
   4695   __ CheckEnumCache(null_value, &call_runtime);
   4696 
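  // The enum cache is valid.  Load the map of the object being iterated
  // over; it is the result in the fast case.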
   4697   __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
   4698   __ jmp(&use_cache, Label::kNear);
   4699 
   4700   // Get the set of properties to enumerate.
   4701   __ bind(&call_runtime);
   4702   __ push(rax);
   4703   CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
   4704 
   4705   __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
   4706                  Heap::kMetaMapRootIndex);
   4707   DeoptimizeIf(not_equal, instr->environment());
   4708   __ bind(&use_cache);
   4709 }
   4710 
   4711 
   4712 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
   4713   Register map = ToRegister(instr->map());
   4714   Register result = ToRegister(instr->result());
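  // Load the enum cache array via the map's instance descriptors.  A smi
  // result means there is no cache for this map; deoptimize in that case.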
   4715   __ LoadInstanceDescriptors(map, result);
   4716   __ movq(result,
   4717           FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
   4718   __ movq(result,
   4719           FieldOperand(result, FixedArray::SizeFor(instr->idx())));
   4720   Condition cc = masm()->CheckSmi(result);
   4721   DeoptimizeIf(cc, instr->environment());
   4722 }
   4723 
   4724 
   4725 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
   4726   Register object = ToRegister(instr->value());
   4727   __ cmpq(ToRegister(instr->map()),
   4728           FieldOperand(object, HeapObject::kMapOffset));
   4729   DeoptimizeIf(not_equal, instr->environment());
   4730 }
   4731 
   4732 
   4733 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
   4734   Register object = ToRegister(instr->object());
   4735   Register index = ToRegister(instr->index());
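  // The field index is a smi: non-negative indices denote in-object fields,
  // negative indices denote fields in the out-of-object properties array.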
   4736 
   4737   Label out_of_object, done;
   4738   __ SmiToInteger32(index, index);
   4739   __ cmpl(index, Immediate(0));
   4740   __ j(less, &out_of_object);
   4741   __ movq(object, FieldOperand(object,
   4742                                index,
   4743                                times_pointer_size,
   4744                                JSObject::kHeaderSize));
   4745   __ jmp(&done, Label::kNear);
   4746 
   4747   __ bind(&out_of_object);
   4748   __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset));
   4749   __ negl(index);
  // Index is now equal to the out-of-object property index plus 1.
   4751   __ movq(object, FieldOperand(object,
   4752                                index,
   4753                                times_pointer_size,
   4754                                FixedArray::kHeaderSize - kPointerSize));
   4755   __ bind(&done);
   4756 }
   4757 
   4758 
   4759 #undef __
   4760 
   4761 } }  // namespace v8::internal
   4762 
   4763 #endif  // V8_TARGET_ARCH_X64
   4764