// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


#ifndef V8_MIPS_CODEGEN_MIPS_H_
#define V8_MIPS_CODEGEN_MIPS_H_


#include "ast.h"
#include "code-stubs-mips.h"
#include "ic-inl.h"

namespace v8 {
namespace internal {

#if defined(__mips_hard_float) && __mips_hard_float != 0
// Use floating-point coprocessor instructions. This flag is raised when
// -mhard-float is passed to the compiler.
static const bool IsMipsSoftFloatABI = false;
#elif defined(__mips_soft_float) && __mips_soft_float != 0
// Not using floating-point coprocessor instructions. This flag is raised when
// -msoft-float is passed to the compiler.
static const bool IsMipsSoftFloatABI = true;
#else
// Neither flag is defined by the compiler; conservatively assume the
// soft-float calling convention.
static const bool IsMipsSoftFloatABI = true;
#endif
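
// A minimal sketch of how this flag is typically consulted (illustrative
// only; the real call sites are in the MIPS code generator and stubs, and
// the register assignments below are the usual o32 calling conventions,
// not something defined in this header):
//
//   if (!IsMipsSoftFloatABI) {
//     // Hard-float ABI: FP arguments travel in FPU registers (f12, f14).
//   } else {
//     // Soft-float ABI: FP arguments travel in the GPRs a0-a3 instead.
//   }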

// Forward declarations
class CompilationInfo;
class DeferredCode;
class JumpTarget;
class RegisterAllocator;
class RegisterFile;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
enum GenerateInlineSmi { DONT_GENERATE_INLINE_SMI, GENERATE_INLINE_SMI };
enum WriteBarrierCharacter { UNLIKELY_SMI, LIKELY_SMI, NEVER_NEWSPACE };


// -----------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that keeps an ECMA
// reference on the execution stack while in scope. For variables
// the reference is empty, indicating that it isn't necessary to
// store state on the stack for keeping track of references to those.
// For properties, we keep either one (named) or two (indexed) values
// on the execution stack to represent the reference.
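//
// A minimal usage sketch (illustrative only; the real call sites are in
// codegen-mips.cc and the exact sequence depends on the expression kind):
//
//   { Reference ref(this, node->target());     // 'this' is the CodeGenerator
//     ref.GetValue();                          // push the current value
//     // ... emit code leaving the new value on top of the frame ...
//     ref.SetValue(NOT_CONST_INIT, LIKELY_SMI);  // store and unload
//   }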
class Reference BASE_EMBEDDED {
 public:
  // The numeric values of the types are significant; see size().
  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
  Reference(CodeGenerator* cgen,
            Expression* expression,
            bool persist_after_get = false);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT_EQ(ILLEGAL, type_);
    type_ = value;
  }

  void set_unloaded() {
    ASSERT_NE(ILLEGAL, type_);
    ASSERT_NE(UNLOADED, type_);
    type_ = UNLOADED;
  }
  // The size the reference takes up on the stack.
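  // For example, given the Type values above: UNLOADED and ILLEGAL references
  // occupy no stack slots, a SLOT reference occupies 0, a NAMED reference 1,
  // and a KEYED reference 2.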
  int size() const {
    return (type_ < SLOT) ? 0 : type_;
  }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
  bool is_unloaded() const { return type_ == UNLOADED; }

  // Return the name. Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack.  The reference is expected to be already on top of
  // the expression stack, and it is consumed by the call unless the
  // reference is for a compound assignment.
  // If the reference is not consumed, it is left in place under its value.
  void GetValue();

  // Generate code to pop a reference, push the value of the reference,
  // and then spill the stack frame.
  inline void GetValueAndSpill();

  // Generate code to store the value on top of the expression stack in the
  // reference.  The reference is expected to be immediately below the value
  // on the expression stack.  The value is stored in the location specified
  // by the reference, and is left on top of the stack, after the reference
  // is popped from beneath it (unloaded).
  void SetValue(InitState init_state, WriteBarrierCharacter wb);

  // This is called in preparation for something that uses the reference on
  // the stack.  If we need the reference after the get, dup it now; otherwise
  // mark it as used.
  inline void DupIfPersist();

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
  // Keep the reference on the stack after get, so it can be used by set later.
  bool persist_after_get_;
};


// -----------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the label pair).  It is threaded through the
// call stack.  Constructing a state implicitly pushes it on the owning code
// generator's stack of states, and destroying one implicitly pops it.
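//
// A minimal sketch of the implied RAII discipline (illustrative only; the
// real call sites are in codegen-mips.cc):
//
//   { ConditionCodeGenState state(this, &true_target, &false_target);
//     Visit(cond);  // visitors reach the targets via true_target()/false_target()
//   }               // destructor restores the previous state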

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state.  Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  virtual ~CodeGenState();

  virtual JumpTarget* true_target() const { return NULL; }
  virtual JumpTarget* false_target() const { return NULL; }

 protected:
  inline CodeGenerator* owner() { return owner_; }
  inline CodeGenState* previous() const { return previous_; }

 private:
  // The owning code generator.
  CodeGenerator* owner_;

  // The previous state of the owning code generator, restored when
  // this state is destroyed.
  CodeGenState* previous_;
};


class ConditionCodeGenState : public CodeGenState {
 public:
  // Create a code generator state based on a code generator's current
  // state.  The new state has its own pair of branch labels.
  ConditionCodeGenState(CodeGenerator* owner,
                        JumpTarget* true_target,
                        JumpTarget* false_target);

  virtual JumpTarget* true_target() const { return true_target_; }
  virtual JumpTarget* false_target() const { return false_target_; }

 private:
  JumpTarget* true_target_;
  JumpTarget* false_target_;
};


class TypeInfoCodeGenState : public CodeGenState {
 public:
  TypeInfoCodeGenState(CodeGenerator* owner,
                       Slot* slot_number,
                       TypeInfo info);
  virtual ~TypeInfoCodeGenState();

  virtual JumpTarget* true_target() const { return previous()->true_target(); }
  virtual JumpTarget* false_target() const {
    return previous()->false_target();
  }

 private:
  Slot* slot_;
  TypeInfo old_type_info_;
};


// -------------------------------------------------------------------------
// Arguments allocation mode

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};


// -----------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  // Compilation mode.  Either the compiler is used as the primary
  // compiler and needs to set up everything, or it is used as the
  // secondary compiler for split compilation and has to handle bailouts.
  enum Mode {
    PRIMARY,
    SECONDARY
  };

  static bool MakeCode(CompilationInfo* info);

  // Printing of AST, etc. as requested by flags.
  static void MakeCodePrologue(CompilationInfo* info);

  // Allocate and install the code.
  static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm,
                                       Code::Flags flags,
                                       CompilationInfo* info);

  // Print the code after compiling it.
  static void PrintCode(Handle<Code> code, CompilationInfo* info);

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static void SetFunctionInfo(Handle<JSFunction> fun,
                              FunctionLiteral* lit,
                              bool is_toplevel,
                              Handle<Script> script);

  static bool RecordPositions(MacroAssembler* masm,
                              int pos,
                              bool right_here = false);

  // Accessors
  MacroAssembler* masm() { return masm_; }
  VirtualFrame* frame() const { return frame_; }
  inline Handle<Script> script();

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers.  The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  void DeleteFrame();

  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  TypeInfo type_info(Slot* slot) {
    int index = NumberOfSlot(slot);
    if (index == kInvalidSlotNumber) return TypeInfo::Unknown();
    return (*type_info_)[index];
  }

  TypeInfo set_type_info(Slot* slot, TypeInfo info) {
    int index = NumberOfSlot(slot);
    ASSERT(index >= kInvalidSlotNumber);
    if (index != kInvalidSlotNumber) {
      TypeInfo previous_value = (*type_info_)[index];
      (*type_info_)[index] = info;
      return previous_value;
    }
    return TypeInfo::Unknown();
  }
  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  // Constants related to patching of inlined load/store.
  static int GetInlinedKeyedLoadInstructionsAfterPatch() {
    // This must be kept in sync with the padding in MacroAssembler::Abort.
    return FLAG_debug_code ? 45 : 20;
  }
  static const int kInlinedKeyedStoreInstructionsAfterPatch = 9;
  static int GetInlinedNamedStoreInstructionsAfterPatch() {
    ASSERT(Isolate::Current()->inlined_write_barrier_size() != -1);
    // Magic number 5: instruction count after the patched map load:
    //  li: 2 (lui & ori), branch: 2 (bne & nop), sw: 1.
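    // For example, if the isolate has recorded an inlined write barrier of
    // 7 instructions (a hypothetical value; the real size is measured at
    // runtime), the patched sequence is 7 + 5 = 12 instructions.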
    return Isolate::Current()->inlined_write_barrier_size() + 5;
  }

 private:
  // Type of a member function that generates inline code for a native function.
  typedef void (CodeGenerator::*InlineFunctionGenerator)
      (ZoneList<Expression*>*);

  static const InlineFunctionGenerator kInlineFunctionGenerators[];


  // Construction/Destruction.
  explicit CodeGenerator(MacroAssembler* masm);

  // Accessors.
  inline bool is_eval();
  inline Scope* scope();
  inline bool is_strict_mode();
  inline StrictModeFlag strict_mode_flag();

  // Generating deferred code.
  void ProcessDeferred();

  static const int kInvalidSlotNumber = -1;

  int NumberOfSlot(Slot* slot);
  // State
  bool has_cc() const { return cc_reg_ != cc_always; }

  JumpTarget* true_target() const { return state_->true_target(); }
  JumpTarget* false_target() const { return state_->false_target(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }

  // Node visitors.
  void VisitStatements(ZoneList<Statement*>* statements);

  virtual void VisitSlot(Slot* node);
#define DEF_VISIT(type) \
  virtual void Visit##type(type* node);
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Main code generation function
  void Generate(CompilationInfo* info);

  // Generate the return sequence code.  Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which cannot be done more than once).  The return value should
  // be in v0.
  void GenerateReturnSequence();

  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode();

  // Store the arguments object and allocate it if necessary.
  void StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  MemOperand SlotOperand(Slot* slot, Register tmp);

  MemOperand ContextSlotOperandCheckExtensions(Slot* slot,
                                               Register tmp,
                                               Register tmp2,
                                               JumpTarget* slow);

  void LoadCondition(Expression* x,
                     JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool force_cc);
  void Load(Expression* x);
  void LoadGlobal();
  void LoadGlobalReceiver(Register scratch);


  // Special code for typeof expressions: we must be careful when loading
  // the operand of 'typeof'. We are not allowed to throw reference errors
  // for non-existing properties of the global object, so we must make the
  // load look like an explicit property access instead of an access through
  // the context chain.
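  //
  // For example, the (legal) program
  //
  //   typeof some_undeclared_name   // yields "undefined", must not throw
  //
  // must not be compiled as an ordinary variable load, since that would
  // throw a ReferenceError when the name cannot be resolved.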
  void LoadTypeofExpression(Expression* x);

  // Store a keyed property. Key and receiver are on the stack and the value is
  // in a0. The result is returned in v0.
  void EmitKeyedStore(StaticType* key_type, WriteBarrierCharacter wb_info);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  void LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                         TypeofState typeof_state,
                                         JumpTarget* slow);
  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState state);

  // Support for loading from local/global variables and arguments
  // whose location is known unless they are shadowed by
  // eval-introduced bindings. Generates no code for unsupported slot
  // types and therefore expects to fall through to the slow jump target.
  void EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                       TypeofState typeof_state,
                                       JumpTarget* slow,
                                       JumpTarget* done);

  // Store the value on top of the stack to a slot.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Support for compiling assignment expressions.
  void EmitSlotAssignment(Assignment* node);
  void EmitNamedPropertyAssignment(Assignment* node);
  void EmitKeyedPropertyAssignment(Assignment* node);

  // Load a named property, returning it in v0. The receiver is passed on the
  // stack, and remains there.
  void EmitNamedLoad(Handle<String> name, bool is_contextual);

  // Store to a named property. If the store is contextual, value is passed on
  // the frame and consumed. Otherwise, receiver and value are passed on the
  // frame and consumed. The result is returned in v0.
  void EmitNamedStore(Handle<String> name, bool is_contextual);

  // Load a keyed property, leaving it in v0. The receiver and key are
  // passed on the stack, and remain there.
  void EmitKeyedLoad();

  void ToBoolean(JumpTarget* true_target, JumpTarget* false_target);

  // Generate code that computes a shortcutting logical operation.
  void GenerateLogicalBooleanOperation(BinaryOperation* node);

  void GenericBinaryOperation(Token::Value op,
                              OverwriteMode overwrite_mode,
                              GenerateInlineSmi inline_smi,
                              int known_rhs =
                                GenericBinaryOpStub::kUnknownIntValue);

  void VirtualFrameBinaryOperation(Token::Value op,
                                   OverwriteMode overwrite_mode,
                                   int known_rhs =
                                      GenericBinaryOpStub::kUnknownIntValue);

  void SmiOperation(Token::Value op,
                    Handle<Object> value,
                    bool reversed,
                    OverwriteMode mode);

  void Comparison(Condition cc,
                  Expression* left,
                  Expression* right,
                  bool strict = false);

  void CallWithArguments(ZoneList<Expression*>* arguments,
                         CallFunctionFlags flags,
                         int position);

  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).  We call x the applicand and y the receiver.
  // The optimization avoids allocating an arguments object if possible.
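  //
  // For example, a typical candidate is a forwarding function such as
  //
  //   function delegate() { return target.apply(this, arguments); }
  //
  // ('delegate' and 'target' are illustrative names), where the arguments
  // object never needs to be materialized if the call can be forwarded
  // directly.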
  void CallApplyLazy(Expression* applicand,
                     Expression* receiver,
                     VariableProxy* arguments,
                     int position);

  // Control flow
  void Branch(bool if_true, JumpTarget* target);
  void CheckStack();

  bool CheckForInlineRuntimeCall(CallRuntime* node);

  static Handle<Code> ComputeLazyCompile(int argc);
  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function based on the shared function info.
  void InstantiateFunction(Handle<SharedFunctionInfo> function_info,
                           bool pretenure);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);
  void GenerateIsRegExp(ZoneList<Expression*>* args);

  // Support for construct call checks.
  void GenerateIsConstructCall(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArguments(ZoneList<Expression*>* args);

  // Support for accessing the class and value fields of an object.
  void GenerateClassOf(ZoneList<Expression*>* args);
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateStringCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for String.fromCharCode(n).
  void GenerateStringCharFromCode(ZoneList<Expression*>* args);

  // Fast support for string.charAt(n) and string[n].
  void GenerateStringCharAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomHeapNumber(ZoneList<Expression*>* args);

  void GenerateIsObject(ZoneList<Expression*>* args);
  void GenerateIsSpecObject(ZoneList<Expression*>* args);
  void GenerateIsFunction(ZoneList<Expression*>* args);
  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
  void GenerateStringAdd(ZoneList<Expression*>* args);
  void GenerateSubString(ZoneList<Expression*>* args);
  void GenerateStringCompare(ZoneList<Expression*>* args);
  void GenerateIsStringWrapperSafeForDefaultValueOf(
      ZoneList<Expression*>* args);

  // Support for direct calls from JavaScript to native RegExp code.
  void GenerateRegExpExec(ZoneList<Expression*>* args);

  void GenerateRegExpConstructResult(ZoneList<Expression*>* args);

  // Support for fast native caches.
  void GenerateGetFromCache(ZoneList<Expression*>* args);

  // Fast support for number to string.
  void GenerateNumberToString(ZoneList<Expression*>* args);

  // Fast swapping of elements.
  void GenerateSwapElements(ZoneList<Expression*>* args);

  // Fast call for custom callbacks.
  void GenerateCallFunction(ZoneList<Expression*>* args);

  // Fast call to math functions.
  void GenerateMathPow(ZoneList<Expression*>* args);
  void GenerateMathSin(ZoneList<Expression*>* args);
  void GenerateMathCos(ZoneList<Expression*>* args);
  void GenerateMathSqrt(ZoneList<Expression*>* args);
  void GenerateMathLog(ZoneList<Expression*>* args);

  void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);

  void GenerateHasCachedArrayIndex(ZoneList<Expression*>* args);
  void GenerateGetCachedArrayIndex(ZoneList<Expression*>* args);
  void GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args);

  // Simple condition analysis.
  enum ConditionAnalysis {
    ALWAYS_TRUE,
    ALWAYS_FALSE,
    DONT_KNOW
  };
  ConditionAnalysis AnalyzeCondition(Expression* cond);

  // Methods used to indicate which source code position is being generated
  // for. Source positions are collected by the assembler and emitted with
  // the relocation information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Statement* node);
  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
  void CodeForSourcePosition(int pos);

#ifdef DEBUG
  // True if the registers are valid for entry to a block.
  bool HasValidEntryRegisters();
#endif

  List<DeferredCode*> deferred_;

  // Assembler
  MacroAssembler* masm_;  // to generate code

  CompilationInfo* info_;

  // Code generation state
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  Condition cc_reg_;
  CodeGenState* state_;
  int loop_nesting_;

  Vector<TypeInfo>* type_info_;
  // Jump targets
  BreakTarget function_return_;

  // True if the function return is shadowed (i.e., jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  friend class VirtualFrame;
  friend class Isolate;
  friend class JumpTarget;
  friend class Reference;
  friend class FastCodeGenerator;
  friend class FullCodeGenerator;
  friend class FullCodeGenSyntaxChecker;
  friend class InlineRuntimeFunctionsTable;
  friend class LCodeGen;

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};


} }  // namespace v8::internal

#endif  // V8_MIPS_CODEGEN_MIPS_H_
