/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

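// Writes ArgumentCount, Callee and ScopeChain into the callee's call frame
// header. Expects regT0 to hold the callee cell and regT1 the argument count;
// the scope chain is loaded from the JSFunction in regT0.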
void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT3); // scopeChain
    emitPutIntToCallFrameHeader(regT1, RegisterFile::ArgumentCount);
    emitPutCellToCallFrameHeader(regT0, RegisterFile::Callee);
    emitPutCellToCallFrameHeader(regT3, RegisterFile::ScopeChain);
}

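// op_call_put_result: store the call's return value, left in regT1 (tag) and
// regT0 (payload) by the callee, into the destination virtual register.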
void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitStore(dst, regT1, regT0);
}

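// Fast path for op_call_varargs. Unlike op_call, the argument count is only
// known at runtime, so it is loaded from a register and the new call frame
// offset is computed dynamically; the callee must still be a JSFunction
// (checked via its vptr), otherwise we take the slow case.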
void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int callee = instruction[1].u.operand;
    int argCountRegister = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    emitLoad(callee, regT1, regT0);
    emitLoadPayload(argCountRegister, regT2); // argCount
    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
    addPtr(callFrameRegister, regT3);
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame, regT3));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame, regT3));
    move(regT3, callFrameRegister);

    move(regT2, regT1); // argCount

    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}

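// Slow path for op_call_varargs, reached when the callee is not a cell or not
// a JSFunction. At this point regT1/regT0 hold the callee, regT3 the computed
// register offset and regT2 the argument count, which are passed straight to
// the cti_op_call_NotJSFunction stub.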
void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int callee = instruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(regT3);
    stubCall.addArgument(regT2);
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

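// op_ret: load the return value into regT1/regT0, restore the caller's frame
// and return address from the call frame header, and return.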
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

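// op_ret_object_or_this (typically emitted for constructor returns): if the
// result register holds an object it is returned as-is, otherwise the 'this'
// value is returned in its place. Both paths then unwind exactly as op_ret does.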
void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    unsigned thisReg = currentInstruction[2].u.operand;

    emitLoad(result, regT1, regT0);
    Jump notJSCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();

    notJSCell.link(this);
    notObject.link(this);
    emitLoad(thisReg, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

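// The emit_op_* and emitSlow_op_* entry points below are thin wrappers around
// the shared compileOpCall / compileOpCallVarargs helpers; the linkable call
// opcodes also advance m_callLinkInfoIndex so each call site gets its own
// link-info slot.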
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

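// Without call linking, every call site funnels through the generic virtual
// call trampolines: verify the callee is a JSFunction, speculatively roll the
// call frame by registerOffset, and jump into ctiVirtualCall (or
// ctiVirtualConstruct for op_construct). op_call_eval first tries the eval
// stub and only performs a normal call if the stub returns the empty value.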
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(TrustedImm32(argCount), regT1);

    emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstruct() : m_globalData->jitStubs->ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}

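// Slow path for the unlinked variant: the callee was not a cell or not a
// JSFunction, so hand the call off to the NotJSFunction / NotJSConstruct stub.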
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

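// With call linking enabled, the fast path emits a patchable pointer compare
// (addressOfLinkedFunctionCheck) against the expected callee followed by a
// direct near call (hotPathOther). Until the call site is linked the compare
// always fails, so execution drops into the slow case below.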
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    // The following is the fast case, only used when a callee can be linked.

    // Fast version of stack frame initialization, performed directly relative to the call frame register (edi on x86).
    // Note that this does not set up RegisterFile::CodeBlock; that is filled in by the callee.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT2);

    store32(TrustedImm32(JSValue::Int32Tag), tagFor(registerOffset + RegisterFile::ArgumentCount));
    store32(Imm32(argCount), payloadFor(registerOffset + RegisterFile::ArgumentCount));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);
    store32(TrustedImm32(JSValue::CellTag), tagFor(registerOffset + RegisterFile::ScopeChain));
    store32(regT2, payloadFor(registerOffset + RegisterFile::ScopeChain));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}

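// Slow path for a linked call site. A callee that is a JSFunction (just not
// the one the site is linked to, or not linked yet) is routed through the
// ctiVirtualCallLink / ctiVirtualConstructLink trampoline, which can link the
// site and enter the callee; non-cells and non-JSFunction callees fall
// through to the generic stub call below.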
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    // Fast check for JS function.
    Jump callLinkFailNotObject = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    // Done - return to the hot path.
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)