// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "fast-codegen.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

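// Fixed register assignments used by the fast code generator on x64: rax and
// rdx are the two accumulators, rcx and rdi are scratch registers, rbx holds
// the receiver, and rsi holds the context.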
Register FastCodeGenerator::accumulator0() { return rax; }
Register FastCodeGenerator::accumulator1() { return rdx; }
Register FastCodeGenerator::scratch0() { return rcx; }
Register FastCodeGenerator::scratch1() { return rdi; }
Register FastCodeGenerator::receiver_reg() { return rbx; }
Register FastCodeGenerator::context_reg() { return rsi; }


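// Load the receiver (the implicit 'this' argument) from the caller's stack
// frame into receiver_reg().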
void FastCodeGenerator::EmitLoadReceiver() {
  // Offset 2 is due to return address and saved frame pointer.
  int index = 2 + scope()->num_parameters();
  __ movq(receiver_reg(), Operand(rbp, index * kPointerSize));
}


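// Load the current value of the global property cell 'cell' into the
// destination register.  In debug mode, check that the cell does not hold
// the hole value (it is a DontDelete cell).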
void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
  ASSERT(!destination().is(no_reg));
  ASSERT(cell->IsJSGlobalPropertyCell());

  __ Move(destination(), cell);
  __ movq(destination(),
          FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
  if (FLAG_debug_code) {
    __ Cmp(destination(), Factory::the_hole_value());
    __ Check(not_equal, "DontDelete cells can't contain the hole");
  }

  // The loaded value is not known to be a smi.
  clear_as_smi(destination());
}


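// Store accumulator0 into the fast property 'name' of the receiver (this).
// A write barrier is emitted unless the stored value is statically known to
// be a smi.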
void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
  LookupResult lookup;
  info()->receiver()->Lookup(*name, &lookup);

  ASSERT(lookup.holder() == *info()->receiver());
  ASSERT(lookup.type() == FIELD);
  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
  int index = lookup.GetFieldIndex() - map->inobject_properties();
  int offset = index * kPointerSize;

  // We will emit the write barrier unless the stored value is statically
  // known to be a smi.
  bool needs_write_barrier = !is_smi(accumulator0());

  // Perform the store.  Negative offsets are inobject properties.
  if (offset < 0) {
    offset += map->instance_size();
    __ movq(FieldOperand(receiver_reg(), offset), accumulator0());
    if (needs_write_barrier) {
      // Preserve receiver from write barrier.
      __ movq(scratch0(), receiver_reg());
    }
  } else {
    offset += FixedArray::kHeaderSize;
    __ movq(scratch0(),
            FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
    __ movq(FieldOperand(scratch0(), offset), accumulator0());
  }

  if (needs_write_barrier) {
    if (destination().is(no_reg)) {
      // After RecordWrite, accumulator0 may happen to hold a smi, but it is
      // already marked as not known to be one.
      __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
    } else {
      // Copy the value to the other accumulator so that one copy is
      // preserved across the write barrier.  One of the accumulators is
      // available as a scratch register.  Neither is a smi.
      __ movq(accumulator1(), accumulator0());
      clear_as_smi(accumulator1());
      Register value_scratch = other_accumulator(destination());
      __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
    }
  } else if (destination().is(accumulator1())) {
    __ movq(accumulator1(), accumulator0());
    // The value is known to be a smi because no write barrier was needed.
    set_as_smi(accumulator1());
  }
}


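// Load the fast property 'name' of the receiver (this) into the destination
// register.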
void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
  ASSERT(!destination().is(no_reg));
  LookupResult lookup;
  info()->receiver()->Lookup(*name, &lookup);

  ASSERT(lookup.holder() == *info()->receiver());
  ASSERT(lookup.type() == FIELD);
  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
  int index = lookup.GetFieldIndex() - map->inobject_properties();
  int offset = index * kPointerSize;

  // Perform the load.  Negative offsets are inobject properties.
  if (offset < 0) {
    offset += map->instance_size();
    __ movq(destination(), FieldOperand(receiver_reg(), offset));
  } else {
    offset += FixedArray::kHeaderSize;
    __ movq(scratch0(),
            FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
    __ movq(destination(), FieldOperand(scratch0(), offset));
  }

  // The loaded value is not known to be a smi.
  clear_as_smi(destination());
}


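// Emit a bitwise OR of the two accumulators (left operand in accumulator1,
// right operand in accumulator0), leaving the result in the destination
// register if there is one.  Unless both operands are known to be smis, the
// result is checked and we bail out if it is not a smi.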
void FastCodeGenerator::EmitBitOr() {
  if (is_smi(accumulator0()) && is_smi(accumulator1())) {
    // If both operands are known to be smis, there is no need to check
    // either the operands or the result.
    if (destination().is(no_reg)) {
      __ or_(accumulator1(), accumulator0());
    } else {
      // Leave the result in the destination register.  Bitwise or is
      // commutative.
      __ or_(destination(), other_accumulator(destination()));
    }
  } else {
    // Left is in accumulator1, right in accumulator0.
    if (destination().is(accumulator0())) {
      __ movq(scratch0(), accumulator0());
      __ or_(destination(), accumulator1());  // Or is commutative.
      Label* bailout =
          info()->AddBailout(accumulator1(), scratch0());  // Left, right.
      __ JumpIfNotSmi(destination(), bailout);
    } else if (destination().is(accumulator1())) {
      __ movq(scratch0(), accumulator1());
      __ or_(destination(), accumulator0());
      Label* bailout = info()->AddBailout(scratch0(), accumulator0());
      __ JumpIfNotSmi(destination(), bailout);
    } else {
      ASSERT(destination().is(no_reg));
      __ movq(scratch0(), accumulator1());
      __ or_(scratch0(), accumulator0());
      Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
      __ JumpIfNotSmi(scratch0(), bailout);
    }
  }

  // If we did not bail out, the result (and in fact both inputs) is known
  // to be a smi.
  set_as_smi(accumulator0());
  set_as_smi(accumulator1());
}


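// Generate the fast code for the function: set up the frame, perform the
// optional map checks on the receiver and the global object (jumping to the
// bailout if they fail), emit the function body, and return undefined.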
void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
  ASSERT(info_ == NULL);
  info_ = compilation_info;

  // Save the caller's frame pointer and set up our own.
  Comment prologue_cmnt(masm(), ";; Prologue");
  __ push(rbp);
  __ movq(rbp, rsp);
  __ push(rsi);  // Context.
  __ push(rdi);  // Closure.
  // Note that we keep a live register reference to rsi (the context) at
  // this point.

  Label* bailout_to_beginning = info()->AddBailout();
  // Receiver (this) is allocated to a fixed register.
  if (info()->has_this_properties()) {
    Comment cmnt(masm(), ";; MapCheck(this)");
    if (FLAG_print_ir) {
      PrintF("MapCheck(this)\n");
    }
    ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
    Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
    Handle<Map> map(object->map());
    EmitLoadReceiver();
    __ CheckMap(receiver_reg(), map, bailout_to_beginning, false);
  }

  // If there is a global variable access, check that the global object is
  // the same as it was at lazy-compilation time.
  if (info()->has_globals()) {
    Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
    if (FLAG_print_ir) {
      PrintF("MapCheck(GLOBAL)\n");
    }
    ASSERT(info()->has_global_object());
    Handle<Map> map(info()->global_object()->map());
    __ movq(scratch0(), CodeGenerator::GlobalObject());
    __ CheckMap(scratch0(), map, bailout_to_beginning, true);
  }

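  // Generate code for the body of the function.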
  VisitStatements(info()->function()->body());

  Comment return_cmnt(masm(), ";; Return(<undefined>)");
  if (FLAG_print_ir) {
    PrintF("Return(<undefined>)\n");
  }
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
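  // Tear down the frame and return, removing the receiver and the arguments
  // from the caller's stack.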
  __ movq(rsp, rbp);
  __ pop(rbp);
  __ ret((scope()->num_parameters() + 1) * kPointerSize);
}


#undef __


} }  // namespace v8::internal