// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "register-allocator-inl.h"
#include "scopes.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// VirtualFrame implementation.

#define __ ACCESS_MASM(masm())
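// As elsewhere in the code generators, the __ shorthand emits instructions
// through the MacroAssembler returned by masm().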


// On entry to a function, the virtual frame already contains the
// receiver and the parameters.  All initial frame elements are in
// memory.
VirtualFrame::VirtualFrame()
    : elements_(parameter_count() + local_count() + kPreallocatedElements),
      stack_pointer_(parameter_count()) {  // 0-based index of TOS.
  for (int i = 0; i <= stack_pointer_; i++) {
    elements_.Add(FrameElement::MemoryElement(NumberInfo::kUnknown));
  }
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    register_locations_[i] = kIllegalIndex;
  }
}


void VirtualFrame::SyncElementBelowStackPointer(int index) {
  UNREACHABLE();
}


void VirtualFrame::SyncElementByPushing(int index) {
  UNREACHABLE();
}


void VirtualFrame::SyncRange(int begin, int end) {
  // All elements are in memory on ARM (i.e., synced).
#ifdef DEBUG
  for (int i = begin; i <= end; i++) {
    ASSERT(elements_[i].is_synced());
  }
#endif
}


void VirtualFrame::MergeTo(VirtualFrame* expected) {
  // ARM frames are currently always in memory.
  ASSERT(Equals(expected));
}


void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
  UNREACHABLE();
}


void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) {
  UNREACHABLE();
}


void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
  UNREACHABLE();
}


void VirtualFrame::Enter() {
  Comment cmnt(masm(), "[ Enter JS frame");

#ifdef DEBUG
  // Verify that r1 contains a JS function.  The following code relies
  // on r2 being available for use.
  if (FLAG_debug_code) {
    Label map_check, done;
    __ tst(r1, Operand(kSmiTagMask));
    __ b(ne, &map_check);
    __ stop("VirtualFrame::Enter - r1 is not a function (smi check).");
    __ bind(&map_check);
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(eq, &done);
    __ stop("VirtualFrame::Enter - r1 is not a function (map check).");
    __ bind(&done);
  }
#endif  // DEBUG

  // We are about to push four values to the frame.
  Adjust(4);
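  // stm with db_w stores the registers at descending addresses with
  // write-back, so afterwards sp points at the saved r1 and the saved fp
  // sits two words above it.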
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  // Adjust FP to point to saved FP.
  __ add(fp, sp, Operand(2 * kPointerSize));
  cgen()->allocator()->Unuse(r1);
  cgen()->allocator()->Unuse(lr);
}


void VirtualFrame::Exit() {
  Comment cmnt(masm(), "[ Exit JS frame");
  // Record the location of the JS exit code for patching when setting a
  // break point.
  __ RecordJSReturn();

  // Drop the execution stack down to the frame pointer and restore the caller
  // frame pointer and return address.
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
}


void VirtualFrame::AllocateStackSlots() {
  int count = local_count();
  if (count > 0) {
    Comment cmnt(masm(), "[ Allocate space for locals");
    Adjust(count);
    // Initialize stack slots with 'undefined' value.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
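    // ip now holds 'undefined' for initializing the locals; r2 holds the
    // stack limit for the check at the end of this function.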
    if (count < kLocalVarBound) {
      // For fewer locals, the unrolled loop is more compact.
      for (int i = 0; i < count; i++) {
        __ push(ip);
      }
    } else {
      // For more locals, a loop in generated code is more compact.
      Label alloc_locals_loop;
      __ mov(r1, Operand(count));
      __ bind(&alloc_locals_loop);
      __ push(ip);
      __ sub(r1, r1, Operand(1), SetCC);
      __ b(ne, &alloc_locals_loop);
    }
  } else {
    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
  }
  // Check the stack for overflow or a break request.
  // Set up lr ahead of the jump to the stub: kInstrSize is added to the
  // implicit 8 byte offset that always applies to operations with pc, giving
  // a return address 12 bytes down, just past the conditional jump below.
  masm()->add(lr, pc, Operand(Assembler::kInstrSize));
  masm()->cmp(sp, Operand(r2));
  StackCheckStub stub;
  // Call the stub if lower.
  masm()->mov(pc,
              Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
                      RelocInfo::CODE_TARGET),
              LeaveCC,
              lo);
}


void VirtualFrame::SaveContextRegister() {
  UNIMPLEMENTED();
}


void VirtualFrame::RestoreContextRegister() {
  UNIMPLEMENTED();
}


void VirtualFrame::PushReceiverSlotAddress() {
  UNIMPLEMENTED();
}


int VirtualFrame::InvalidateFrameSlotAt(int index) {
  UNIMPLEMENTED();
  return kIllegalIndex;
}


void VirtualFrame::TakeFrameSlotAt(int index) {
  UNIMPLEMENTED();
}


void VirtualFrame::StoreToFrameSlotAt(int index) {
  UNIMPLEMENTED();
}


void VirtualFrame::PushTryHandler(HandlerType type) {
  // Grow the expression stack by handler size less one (the return
  // address in lr is already counted by a call instruction).
  Adjust(kHandlerSize - 1);
  __ PushTryHandler(IN_JAVASCRIPT, type);
}


void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
  Forget(arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(f, arg_count);
}


void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
  Forget(arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(id, arg_count);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void VirtualFrame::DebugBreak() {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ DebugBreak();
}
#endif


void VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
                                 InvokeJSFlags flags,
                                 int arg_count) {
  Forget(arg_count);
  __ InvokeBuiltin(id, flags);
}


void VirtualFrame::CallCodeObject(Handle<Code> code,
                                  RelocInfo::Mode rmode,
                                  int dropped_args) {
  switch (code->kind()) {
    case Code::CALL_IC:
    case Code::FUNCTION:
      break;
    case Code::KEYED_LOAD_IC:
    case Code::LOAD_IC:
    case Code::KEYED_STORE_IC:
    case Code::STORE_IC:
      ASSERT(dropped_args == 0);
      break;
    case Code::BUILTIN:
      ASSERT(*code == Builtins::builtin(Builtins::JSConstructCall));
      break;
    default:
      UNREACHABLE();
      break;
  }
  Forget(dropped_args);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ Call(code, rmode);
}


void VirtualFrame::Drop(int count) {
  ASSERT(count >= 0);
  ASSERT(height() >= count);
  int num_virtual_elements = (element_count() - 1) - stack_pointer_;
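  // Elements above the hardware stack pointer exist only in the virtual
  // frame, so dropping them requires no generated code.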

  // Emit code to adjust the stack pointer if some of the dropped elements
  // are actually on the stack.
  if (num_virtual_elements < count) {
    int num_dropped = count - num_virtual_elements;
    stack_pointer_ -= num_dropped;
    __ add(sp, sp, Operand(num_dropped * kPointerSize));
  }

  // Discard elements from the virtual frame and free any registers.
  for (int i = 0; i < count; i++) {
    FrameElement dropped = elements_.RemoveLast();
    if (dropped.is_register()) {
      Unuse(dropped.reg());
    }
  }
}


Result VirtualFrame::Pop() {
  UNIMPLEMENTED();
  return Result();
}


void VirtualFrame::EmitPop(Register reg) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(reg);
}


void VirtualFrame::EmitPush(Register reg) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(NumberInfo::kUnknown));
  stack_pointer_++;
  __ push(reg);
}


void VirtualFrame::EmitPushMultiple(int count, int src_regs) {
  ASSERT(stack_pointer_ == element_count() - 1);
  Adjust(count);
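  // src_regs is a register bit mask; a single stm with db_w pushes all of
  // them, with the lowest-numbered register ending up closest to the new sp.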
  __ stm(db_w, sp, src_regs);
}


#undef __

} }  // namespace v8::internal