      1 // Copyright 2011 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_
     29 #define V8_IA32_MACRO_ASSEMBLER_IA32_H_
     30 
     31 #include "assembler.h"
     32 #include "type-info.h"
     33 
     34 namespace v8 {
     35 namespace internal {
     36 
     37 // Flags used for the AllocateInNewSpace functions.
     38 enum AllocationFlags {
     39   // No special flags.
     40   NO_ALLOCATION_FLAGS = 0,
      41   // Return the pointer to the allocated object already tagged as a heap object.
     42   TAG_OBJECT = 1 << 0,
     43   // The content of the result register already contains the allocation top in
     44   // new space.
     45   RESULT_CONTAINS_TOP = 1 << 1
     46 };
     47 
     48 // Convenience for platform-independent signatures.  We do not normally
     49 // distinguish memory operands from other operands on ia32.
     50 typedef Operand MemOperand;
     51 
     52 // Forward declaration.
     53 class PostCallGenerator;
     54 
     55 // MacroAssembler implements a collection of frequently used macros.
     56 class MacroAssembler: public Assembler {
     57  public:
     58   // The isolate parameter can be NULL if the macro assembler should
     59   // not use isolate-dependent functionality. In this case, it's the
      60   // responsibility of the caller never to invoke such a function on the
      61   // macro assembler.
     62   MacroAssembler(Isolate* isolate, void* buffer, int size);
     63 
     64   // ---------------------------------------------------------------------------
     65   // GC Support
     66 
     67   // For page containing |object| mark region covering |addr| dirty.
     68   // RecordWriteHelper only works if the object is not in new
     69   // space.
     70   void RecordWriteHelper(Register object,
     71                          Register addr,
     72                          Register scratch);
     73 
     74   // Check if object is in new space.
     75   // scratch can be object itself, but it will be clobbered.
     76   template <typename LabelType>
     77   void InNewSpace(Register object,
     78                   Register scratch,
     79                   Condition cc,  // equal for new space, not_equal otherwise.
     80                   LabelType* branch);
     81 
     82   // For page containing |object| mark region covering [object+offset]
     83   // dirty. |object| is the object being stored into, |value| is the
     84   // object being stored. If offset is zero, then the scratch register
     85   // contains the array index into the elements array represented as a
     86   // Smi. All registers are clobbered by the operation. RecordWrite
     87   // filters out smis so it does not update the write barrier if the
     88   // value is a smi.
     89   void RecordWrite(Register object,
     90                    int offset,
     91                    Register value,
     92                    Register scratch);
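
  // Illustrative use (a sketch, not part of this interface): emit the write
  // barrier after storing a tagged value into a field, e.g.
  //   masm->mov(FieldOperand(edx, JSObject::kPropertiesOffset), eax);
  //   masm->RecordWrite(edx, JSObject::kPropertiesOffset, eax, ecx);
  // where eax holds the stored value and ecx is a scratch register.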
     93 
     94   // For page containing |object| mark region covering |address|
     95   // dirty. |object| is the object being stored into, |value| is the
     96   // object being stored. All registers are clobbered by the
     97   // operation. RecordWrite filters out smis so it does not update the
     98   // write barrier if the value is a smi.
     99   void RecordWrite(Register object,
    100                    Register address,
    101                    Register value);
    102 
    103 #ifdef ENABLE_DEBUGGER_SUPPORT
    104   // ---------------------------------------------------------------------------
    105   // Debugger Support
    106 
    107   void DebugBreak();
    108 #endif
    109 
    110   // ---------------------------------------------------------------------------
    111   // Activation frames
    112 
    113   void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
    114   void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
    115 
    116   void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
    117   void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
    118 
    119   // Enter specific kind of exit frame. Expects the number of
    120   // arguments in register eax and sets up the number of arguments in
    121   // register edi and the pointer to the first argument in register
    122   // esi.
    123   void EnterExitFrame(bool save_doubles);
    124 
    125   void EnterApiExitFrame(int argc);
    126 
    127   // Leave the current exit frame. Expects the return value in
    128   // register eax:edx (untouched) and the pointer to the first
    129   // argument in register esi.
    130   void LeaveExitFrame(bool save_doubles);
    131 
    132   // Leave the current exit frame. Expects the return value in
    133   // register eax (untouched).
    134   void LeaveApiExitFrame();
    135 
    136   // Find the function context up the context chain.
    137   void LoadContext(Register dst, int context_chain_length);
    138 
    139   // Load the global function with the given index.
    140   void LoadGlobalFunction(int index, Register function);
    141 
    142   // Load the initial map from the global function. The registers
    143   // function and map can be the same.
    144   void LoadGlobalFunctionInitialMap(Register function, Register map);
    145 
    146   // Push and pop the registers that can hold pointers.
    147   void PushSafepointRegisters() { pushad(); }
    148   void PopSafepointRegisters() { popad(); }
    149   // Store the value in register/immediate src in the safepoint
    150   // register stack slot for register dst.
    151   void StoreToSafepointRegisterSlot(Register dst, Register src);
    152   void StoreToSafepointRegisterSlot(Register dst, Immediate src);
    153   void LoadFromSafepointRegisterSlot(Register dst, Register src);
    154 
    155   // ---------------------------------------------------------------------------
    156   // JavaScript invokes
    157 
    158   // Invoke the JavaScript function code by either calling or jumping.
    159   void InvokeCode(const Operand& code,
    160                   const ParameterCount& expected,
    161                   const ParameterCount& actual,
    162                   InvokeFlag flag,
    163                   PostCallGenerator* post_call_generator = NULL);
    164 
    165   void InvokeCode(Handle<Code> code,
    166                   const ParameterCount& expected,
    167                   const ParameterCount& actual,
    168                   RelocInfo::Mode rmode,
    169                   InvokeFlag flag,
    170                   PostCallGenerator* post_call_generator = NULL);
    171 
    172   // Invoke the JavaScript function in the given register. Changes the
    173   // current context to the context in the function before invoking.
    174   void InvokeFunction(Register function,
    175                       const ParameterCount& actual,
    176                       InvokeFlag flag,
    177                       PostCallGenerator* post_call_generator = NULL);
    178 
    179   void InvokeFunction(JSFunction* function,
    180                       const ParameterCount& actual,
    181                       InvokeFlag flag,
    182                       PostCallGenerator* post_call_generator = NULL);
    183 
    184   // Invoke specified builtin JavaScript function. Adds an entry to
    185   // the unresolved list if the name does not resolve.
    186   void InvokeBuiltin(Builtins::JavaScript id,
    187                      InvokeFlag flag,
    188                      PostCallGenerator* post_call_generator = NULL);
    189 
    190   // Store the function for the given builtin in the target register.
    191   void GetBuiltinFunction(Register target, Builtins::JavaScript id);
    192 
    193   // Store the code object for the given builtin in the target register.
    194   void GetBuiltinEntry(Register target, Builtins::JavaScript id);
    195 
    196   // Expression support
    197   void Set(Register dst, const Immediate& x);
    198   void Set(const Operand& dst, const Immediate& x);
    199 
    200   // Support for constant splitting.
    201   bool IsUnsafeImmediate(const Immediate& x);
    202   void SafeSet(Register dst, const Immediate& x);
    203   void SafePush(const Immediate& x);
    204 
    205   // Compare object type for heap object.
    206   // Incoming register is heap_object and outgoing register is map.
    207   void CmpObjectType(Register heap_object, InstanceType type, Register map);
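
  // Illustrative use (a sketch): branch to a miss label unless the heap object
  // in eax is a JSFunction, loading its map into ebx, e.g.
  //   masm->CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  //   masm->j(not_equal, &miss);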
    208 
    209   // Compare instance type for map.
    210   void CmpInstanceType(Register map, InstanceType type);
    211 
    212   // Check if the map of an object is equal to a specified map and
    213   // branch to label if not. Skip the smi check if not required
    214   // (object is known to be a heap object)
    215   void CheckMap(Register obj,
    216                 Handle<Map> map,
    217                 Label* fail,
    218                 bool is_heap_object);
    219 
    220   // Check if the object in register heap_object is a string. Afterwards the
    221   // register map contains the object map and the register instance_type
    222   // contains the instance_type. The registers map and instance_type can be the
    223   // same in which case it contains the instance type afterwards. Either of the
    224   // registers map and instance_type can be the same as heap_object.
    225   Condition IsObjectStringType(Register heap_object,
    226                                Register map,
    227                                Register instance_type);
    228 
    229   // Check if a heap object's type is in the JSObject range, not including
    230   // JSFunction.  The object's map will be loaded in the map register.
    231   // Any or all of the three registers may be the same.
    232   // The contents of the scratch register will always be overwritten.
    233   void IsObjectJSObjectType(Register heap_object,
    234                             Register map,
    235                             Register scratch,
    236                             Label* fail);
    237 
    238   // The contents of the scratch register will be overwritten.
    239   void IsInstanceJSObjectType(Register map, Register scratch, Label* fail);
    240 
    241   // FCmp is similar to integer cmp, but requires unsigned
     242   // jcc instructions (je, ja, jae, jb, jbe, jne, and jz).
    243   void FCmp();
    244 
    245   // Smi tagging support.
    246   void SmiTag(Register reg) {
    247     ASSERT(kSmiTag == 0);
    248     ASSERT(kSmiTagSize == 1);
    249     add(reg, Operand(reg));
    250   }
    251   void SmiUntag(Register reg) {
    252     sar(reg, kSmiTagSize);
    253   }
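
  // Smis are stored as value << kSmiTagSize with a zero tag bit, e.g. the
  // integer 5 is encoded as 10.  SmiTag therefore simply doubles the register,
  // and SmiUntag shifts the tag bit out into the carry flag, which the
  // variants below use to detect values that are not smis.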
    254 
    255   // Modifies the register even if it does not contain a Smi!
    256   void SmiUntag(Register reg, TypeInfo info, Label* non_smi) {
    257     ASSERT(kSmiTagSize == 1);
    258     sar(reg, kSmiTagSize);
    259     if (info.IsSmi()) {
    260       ASSERT(kSmiTag == 0);
    261       j(carry, non_smi);
    262     }
    263   }
    264 
    265   // Modifies the register even if it does not contain a Smi!
    266   void SmiUntag(Register reg, Label* is_smi) {
    267     ASSERT(kSmiTagSize == 1);
    268     sar(reg, kSmiTagSize);
    269     ASSERT(kSmiTag == 0);
    270     j(not_carry, is_smi);
    271   }
    272 
     273   // Jump if the register contains a smi.
    274   inline void JumpIfSmi(Register value, Label* smi_label) {
    275     test(value, Immediate(kSmiTagMask));
    276     j(zero, smi_label, not_taken);
    277   }
     278   // Jump if the register contains a non-smi.
    279   inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
    280     test(value, Immediate(kSmiTagMask));
    281     j(not_zero, not_smi_label, not_taken);
    282   }
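
  // Illustrative use (a sketch): branch to a separate code path when eax
  // holds a smi, e.g.
  //   Label smi_case;
  //   masm->JumpIfSmi(eax, &smi_case);
  //   // ... heap-object path ...
  //   masm->bind(&smi_case);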
    283 
    284   // Assumes input is a heap object.
    285   void JumpIfNotNumber(Register reg, TypeInfo info, Label* on_not_number);
    286 
     287   // Assumes input is a heap number.  Jumps to on_not_int32 for values out of
     288   // int32 range and for the minimum negative int32.  Ignores fractional parts.
    289   void ConvertToInt32(Register dst,
    290                       Register src,      // Can be the same as dst.
    291                       Register scratch,  // Can be no_reg or dst, but not src.
    292                       TypeInfo info,
    293                       Label* on_not_int32);
    294 
    295   void LoadPowerOf2(XMMRegister dst, Register scratch, int power);
    296 
    297   // Abort execution if argument is not a number. Used in debug code.
    298   void AbortIfNotNumber(Register object);
    299 
    300   // Abort execution if argument is not a smi. Used in debug code.
    301   void AbortIfNotSmi(Register object);
    302 
    303   // Abort execution if argument is a smi. Used in debug code.
    304   void AbortIfSmi(Register object);
    305 
    306   // Abort execution if argument is a string. Used in debug code.
    307   void AbortIfNotString(Register object);
    308 
    309   // ---------------------------------------------------------------------------
    310   // Exception handling
    311 
    312   // Push a new try handler and link into try handler chain.  The return
    313   // address must be pushed before calling this helper.
    314   void PushTryHandler(CodeLocation try_location, HandlerType type);
    315 
    316   // Unlink the stack handler on top of the stack from the try handler chain.
    317   void PopTryHandler();
    318 
     319   // Activate the top handler in the try handler chain.
    320   void Throw(Register value);
    321 
    322   void ThrowUncatchable(UncatchableExceptionType type, Register value);
    323 
    324   // ---------------------------------------------------------------------------
    325   // Inline caching support
    326 
    327   // Generate code for checking access rights - used for security checks
    328   // on access to global objects across environments. The holder register
    329   // is left untouched, but the scratch register is clobbered.
    330   void CheckAccessGlobalProxy(Register holder_reg,
    331                               Register scratch,
    332                               Label* miss);
    333 
    334 
    335   // ---------------------------------------------------------------------------
    336   // Allocation support
    337 
    338   // Allocate an object in new space. If the new space is exhausted control
    339   // continues at the gc_required label. The allocated object is returned in
    340   // result and end of the new object is returned in result_end. The register
    341   // scratch can be passed as no_reg in which case an additional object
    342   // reference will be added to the reloc info. The returned pointers in result
    343   // and result_end have not yet been tagged as heap objects. If
    344   // result_contains_top_on_entry is true the content of result is known to be
    345   // the allocation top on entry (could be result_end from a previous call to
    346   // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
    347   // should be no_reg as it is never used.
    348   void AllocateInNewSpace(int object_size,
    349                           Register result,
    350                           Register result_end,
    351                           Register scratch,
    352                           Label* gc_required,
    353                           AllocationFlags flags);
    354 
    355   void AllocateInNewSpace(int header_size,
    356                           ScaleFactor element_size,
    357                           Register element_count,
    358                           Register result,
    359                           Register result_end,
    360                           Register scratch,
    361                           Label* gc_required,
    362                           AllocationFlags flags);
    363 
    364   void AllocateInNewSpace(Register object_size,
    365                           Register result,
    366                           Register result_end,
    367                           Register scratch,
    368                           Label* gc_required,
    369                           AllocationFlags flags);
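
  // Illustrative use (a sketch): allocate a fixed-size, already tagged object
  // and fall back to the runtime when new space is full, e.g.
  //   Label gc_required;
  //   masm->AllocateInNewSpace(HeapNumber::kSize, eax, ebx, no_reg,
  //                            &gc_required, TAG_OBJECT);
  // Afterwards eax holds the tagged result and ebx the new allocation top.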
    370 
     371   // Undo allocation in new space. The object passed and objects allocated after
     372   // it will no longer be allocated. Make sure that no pointers are left to the
     373   // object(s) no longer allocated, as they become invalid once the allocation
     374   // is undone.
    375   void UndoAllocationInNewSpace(Register object);
    376 
    377   // Allocate a heap number in new space with undefined value. The
    378   // register scratch2 can be passed as no_reg; the others must be
    379   // valid registers. Returns tagged pointer in result register, or
    380   // jumps to gc_required if new space is full.
    381   void AllocateHeapNumber(Register result,
    382                           Register scratch1,
    383                           Register scratch2,
    384                           Label* gc_required);
    385 
    386   // Allocate a sequential string. All the header fields of the string object
    387   // are initialized.
    388   void AllocateTwoByteString(Register result,
    389                              Register length,
    390                              Register scratch1,
    391                              Register scratch2,
    392                              Register scratch3,
    393                              Label* gc_required);
    394   void AllocateAsciiString(Register result,
    395                            Register length,
    396                            Register scratch1,
    397                            Register scratch2,
    398                            Register scratch3,
    399                            Label* gc_required);
    400   void AllocateAsciiString(Register result,
    401                            int length,
    402                            Register scratch1,
    403                            Register scratch2,
    404                            Label* gc_required);
    405 
    406   // Allocate a raw cons string object. Only the map field of the result is
    407   // initialized.
    408   void AllocateConsString(Register result,
    409                           Register scratch1,
    410                           Register scratch2,
    411                           Label* gc_required);
    412   void AllocateAsciiConsString(Register result,
    413                                Register scratch1,
    414                                Register scratch2,
    415                                Label* gc_required);
    416 
    417   // Copy memory, byte-by-byte, from source to destination.  Not optimized for
    418   // long or aligned copies.
     419   // The contents of length and scratch are destroyed.
    420   void CopyBytes(Register source,
    421                  Register destination,
    422                  Register length,
    423                  Register scratch);
    424 
    425   // ---------------------------------------------------------------------------
    426   // Support functions.
    427 
    428   // Check if result is zero and op is negative.
    429   void NegativeZeroTest(Register result, Register op, Label* then_label);
    430 
    431   // Check if result is zero and any of op1 and op2 are negative.
    432   // Register scratch is destroyed, and it must be different from op2.
    433   void NegativeZeroTest(Register result, Register op1, Register op2,
    434                         Register scratch, Label* then_label);
    435 
     436   // Try to get the prototype of a function and put the value in
     437   // the result register. Checks that the function really is a
    438   // function and jumps to the miss label if the fast checks fail. The
    439   // function register will be untouched; the other registers may be
    440   // clobbered.
    441   void TryGetFunctionPrototype(Register function,
    442                                Register result,
    443                                Register scratch,
    444                                Label* miss);
    445 
    446   // Generates code for reporting that an illegal operation has
    447   // occurred.
    448   void IllegalOperation(int num_arguments);
    449 
    450   // Picks out an array index from the hash field.
    451   // Register use:
    452   //   hash - holds the index's hash. Clobbered.
    453   //   index - holds the overwritten index on exit.
    454   void IndexFromHash(Register hash, Register index);
    455 
    456   // ---------------------------------------------------------------------------
    457   // Runtime calls
    458 
    459   // Call a code stub.  Generate the code if necessary.
    460   void CallStub(CodeStub* stub);
    461 
    462   // Call a code stub and return the code object called.  Try to generate
    463   // the code if necessary.  Do not perform a GC but instead return a retry
    464   // after GC failure.
    465   MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub);
    466 
    467   // Tail call a code stub (jump).  Generate the code if necessary.
    468   void TailCallStub(CodeStub* stub);
    469 
    470   // Tail call a code stub (jump) and return the code object called.  Try to
    471   // generate the code if necessary.  Do not perform a GC but instead return
    472   // a retry after GC failure.
    473   MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub);
    474 
    475   // Return from a code stub after popping its arguments.
    476   void StubReturn(int argc);
    477 
    478   // Call a runtime routine.
    479   void CallRuntime(const Runtime::Function* f, int num_arguments);
    480   void CallRuntimeSaveDoubles(Runtime::FunctionId id);
    481 
    482   // Call a runtime function, returning the CodeStub object called.
    483   // Try to generate the stub code if necessary.  Do not perform a GC
    484   // but instead return a retry after GC failure.
    485   MUST_USE_RESULT MaybeObject* TryCallRuntime(const Runtime::Function* f,
    486                                               int num_arguments);
    487 
    488   // Convenience function: Same as above, but takes the fid instead.
    489   void CallRuntime(Runtime::FunctionId id, int num_arguments);
    490 
    491   // Convenience function: Same as above, but takes the fid instead.
    492   MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::FunctionId id,
    493                                               int num_arguments);
    494 
    495   // Convenience function: call an external reference.
    496   void CallExternalReference(ExternalReference ref, int num_arguments);
    497 
    498   // Tail call of a runtime routine (jump).
    499   // Like JumpToExternalReference, but also takes care of passing the number
    500   // of parameters.
    501   void TailCallExternalReference(const ExternalReference& ext,
    502                                  int num_arguments,
    503                                  int result_size);
    504 
    505   // Tail call of a runtime routine (jump). Try to generate the code if
    506   // necessary. Do not perform a GC but instead return a retry after GC failure.
    507   MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
    508       const ExternalReference& ext, int num_arguments, int result_size);
    509 
    510   // Convenience function: tail call a runtime routine (jump).
    511   void TailCallRuntime(Runtime::FunctionId fid,
    512                        int num_arguments,
    513                        int result_size);
    514 
    515   // Convenience function: tail call a runtime routine (jump). Try to generate
    516   // the code if necessary. Do not perform a GC but instead return a retry after
    517   // GC failure.
    518   MUST_USE_RESULT MaybeObject* TryTailCallRuntime(Runtime::FunctionId fid,
    519                                                   int num_arguments,
    520                                                   int result_size);
    521 
    522   // Before calling a C-function from generated code, align arguments on stack.
    523   // After aligning the frame, arguments must be stored in esp[0], esp[4],
    524   // etc., not pushed. The argument count assumes all arguments are word sized.
    525   // Some compilers/platforms require the stack to be aligned when calling
    526   // C++ code.
    527   // Needs a scratch register to do some arithmetic. This register will be
    528   // trashed.
    529   void PrepareCallCFunction(int num_arguments, Register scratch);
    530 
    531   // Calls a C function and cleans up the space for arguments allocated
    532   // by PrepareCallCFunction. The called function is not allowed to trigger a
    533   // garbage collection, since that might move the code and invalidate the
    534   // return address (unless this is somehow accounted for by the called
    535   // function).
    536   void CallCFunction(ExternalReference function, int num_arguments);
    537   void CallCFunction(Register function, int num_arguments);
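
  // Illustrative use (a sketch; the external reference is a hypothetical
  // stand-in): pass two word-sized arguments to a C helper, e.g.
  //   masm->PrepareCallCFunction(2, ecx);
  //   masm->mov(Operand(esp, 0 * kPointerSize), eax);
  //   masm->mov(Operand(esp, 1 * kPointerSize), edx);
  //   masm->CallCFunction(ExternalReference::some_helper(isolate), 2);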
    538 
    539   // Prepares stack to put arguments (aligns and so on). Reserves
    540   // space for return value if needed (assumes the return value is a handle).
    541   // Uses callee-saved esi to restore stack state after call. Arguments must be
    542   // stored in ApiParameterOperand(0), ApiParameterOperand(1) etc. Saves
    543   // context (esi).
    544   void PrepareCallApiFunction(int argc, Register scratch);
    545 
    546   // Calls an API function. Allocates HandleScope, extracts
    547   // returned value from handle and propagates exceptions.
    548   // Clobbers ebx, edi and caller-save registers. Restores context.
    549   // On return removes stack_space * kPointerSize (GCed).
    550   MaybeObject* TryCallApiFunctionAndReturn(ApiFunction* function,
    551                                            int stack_space);
    552 
    553   // Jump to a runtime routine.
    554   void JumpToExternalReference(const ExternalReference& ext);
    555 
    556   MaybeObject* TryJumpToExternalReference(const ExternalReference& ext);
    557 
    558 
    559   // ---------------------------------------------------------------------------
    560   // Utilities
    561 
    562   void Ret();
    563 
    564   // Return and drop arguments from stack, where the number of arguments
    565   // may be bigger than 2^16 - 1.  Requires a scratch register.
    566   void Ret(int bytes_dropped, Register scratch);
    567 
    568   // Emit code to discard a non-negative number of pointer-sized elements
    569   // from the stack, clobbering only the esp register.
    570   void Drop(int element_count);
    571 
    572   void Call(Label* target) { call(target); }
    573 
    574   // Emit call to the code we are currently generating.
    575   void CallSelf() {
    576     Handle<Code> self(reinterpret_cast<Code**>(CodeObject().location()));
    577     call(self, RelocInfo::CODE_TARGET);
    578   }
    579 
    580   // Move if the registers are not identical.
    581   void Move(Register target, Register source);
    582 
    583   void Move(Register target, Handle<Object> value);
    584 
    585   Handle<Object> CodeObject() {
    586     ASSERT(!code_object_.is_null());
    587     return code_object_;
    588   }
    589 
    590 
    591   // ---------------------------------------------------------------------------
    592   // StatsCounter support
    593 
    594   void SetCounter(StatsCounter* counter, int value);
    595   void IncrementCounter(StatsCounter* counter, int value);
    596   void DecrementCounter(StatsCounter* counter, int value);
    597   void IncrementCounter(Condition cc, StatsCounter* counter, int value);
    598   void DecrementCounter(Condition cc, StatsCounter* counter, int value);
    599 
    600 
    601   // ---------------------------------------------------------------------------
    602   // Debugging
    603 
    604   // Calls Abort(msg) if the condition cc is not satisfied.
    605   // Use --debug_code to enable.
    606   void Assert(Condition cc, const char* msg);
    607 
    608   void AssertFastElements(Register elements);
    609 
    610   // Like Assert(), but always enabled.
    611   void Check(Condition cc, const char* msg);
    612 
    613   // Print a message to stdout and abort execution.
    614   void Abort(const char* msg);
    615 
    616   // Check that the stack is aligned.
    617   void CheckStackAlignment();
    618 
    619   // Verify restrictions about code generated in stubs.
    620   void set_generating_stub(bool value) { generating_stub_ = value; }
    621   bool generating_stub() { return generating_stub_; }
    622   void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
    623   bool allow_stub_calls() { return allow_stub_calls_; }
    624 
    625   // ---------------------------------------------------------------------------
    626   // String utilities.
    627 
    628   // Check whether the instance type represents a flat ascii string. Jump to the
     629   // label if not. If the instance type can be scratched, specify the same
     630   // register for both instance type and scratch.
    631   void JumpIfInstanceTypeIsNotSequentialAscii(Register instance_type,
    632                                               Register scratch,
    633                                               Label* on_not_flat_ascii_string);
    634 
    635   // Checks if both objects are sequential ASCII strings, and jumps to label
    636   // if either is not.
    637   void JumpIfNotBothSequentialAsciiStrings(Register object1,
    638                                            Register object2,
    639                                            Register scratch1,
    640                                            Register scratch2,
    641                                            Label* on_not_flat_ascii_strings);
    642 
    643   static int SafepointRegisterStackIndex(Register reg) {
    644     return SafepointRegisterStackIndex(reg.code());
    645   }
    646 
    647  private:
    648   bool generating_stub_;
    649   bool allow_stub_calls_;
    650   // This handle will be patched with the code object on installation.
    651   Handle<Object> code_object_;
    652 
    653   // Helper functions for generating invokes.
    654   void InvokePrologue(const ParameterCount& expected,
    655                       const ParameterCount& actual,
    656                       Handle<Code> code_constant,
    657                       const Operand& code_operand,
    658                       NearLabel* done,
    659                       InvokeFlag flag,
    660                       PostCallGenerator* post_call_generator = NULL);
    661 
    662   // Activation support.
    663   void EnterFrame(StackFrame::Type type);
    664   void LeaveFrame(StackFrame::Type type);
    665 
    666   void EnterExitFramePrologue();
    667   void EnterExitFrameEpilogue(int argc, bool save_doubles);
    668 
    669   void LeaveExitFrameEpilogue();
    670 
    671   // Allocation support helpers.
    672   void LoadAllocationTopHelper(Register result,
    673                                Register scratch,
    674                                AllocationFlags flags);
    675   void UpdateAllocationTopHelper(Register result_end, Register scratch);
    676 
    677   // Helper for PopHandleScope.  Allowed to perform a GC and returns
    678   // NULL if gc_allowed.  Does not perform a GC if !gc_allowed, and
    679   // possibly returns a failure object indicating an allocation failure.
    680   MUST_USE_RESULT MaybeObject* PopHandleScopeHelper(Register saved,
    681                                                     Register scratch,
    682                                                     bool gc_allowed);
    683 
    684 
    685   // Compute memory operands for safepoint stack slots.
    686   Operand SafepointRegisterSlot(Register reg);
    687   static int SafepointRegisterStackIndex(int reg_code);
    688 
    689   // Needs access to SafepointRegisterStackIndex for optimized frame
    690   // traversal.
    691   friend class OptimizedFrame;
    692 };
    693 
    694 
    695 template <typename LabelType>
    696 void MacroAssembler::InNewSpace(Register object,
    697                                 Register scratch,
    698                                 Condition cc,
    699                                 LabelType* branch) {
    700   ASSERT(cc == equal || cc == not_equal);
    701   if (Serializer::enabled()) {
    702     // Can't do arithmetic on external references if it might get serialized.
    703     mov(scratch, Operand(object));
    704     // The mask isn't really an address.  We load it as an external reference in
    705     // case the size of the new space is different between the snapshot maker
    706     // and the running system.
    707     and_(Operand(scratch),
    708          Immediate(ExternalReference::new_space_mask(isolate())));
    709     cmp(Operand(scratch),
    710         Immediate(ExternalReference::new_space_start(isolate())));
    711     j(cc, branch);
    712   } else {
    713     int32_t new_space_start = reinterpret_cast<int32_t>(
    714         ExternalReference::new_space_start(isolate()).address());
    715     lea(scratch, Operand(object, -new_space_start));
    716     and_(scratch, isolate()->heap()->NewSpaceMask());
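    // (object - new_space_start) & mask is zero exactly when the object lies
    // in new space, so the and_ above sets the zero flag for the branch below.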
    717     j(cc, branch);
    718   }
    719 }
    720 
    721 
    722 // The code patcher is used to patch (typically) small parts of code e.g. for
    723 // debugging and other types of instrumentation. When using the code patcher
     724 // the exact number of bytes specified must be emitted. It is not legal to emit
     725 // relocation information. If any of these constraints are violated, it causes
     726 // an assertion failure.
    727 class CodePatcher {
    728  public:
    729   CodePatcher(byte* address, int size);
    730   virtual ~CodePatcher();
    731 
    732   // Macro assembler to emit code.
    733   MacroAssembler* masm() { return &masm_; }
    734 
    735  private:
    736   byte* address_;  // The address of the code being patched.
    737   int size_;  // Number of bytes of the expected patch size.
    738   MacroAssembler masm_;  // Macro assembler used to generate the code.
    739 };
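
// Illustrative use (a sketch): overwrite exactly two bytes at a known code
// address with int3 breakpoints, e.g.
//   CodePatcher patcher(address, 2);
//   patcher.masm()->int3();
//   patcher.masm()->int3();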
    740 
    741 
    742 // Helper class for generating code or data associated with the code
     743 // right after a call instruction. For example, this can be used to
     744 // generate safepoint data after calls for Crankshaft.
    745 class PostCallGenerator {
    746  public:
    747   PostCallGenerator() { }
    748   virtual ~PostCallGenerator() { }
    749   virtual void Generate() = 0;
    750 };
    751 
    752 
    753 // -----------------------------------------------------------------------------
    754 // Static helper functions.
    755 
    756 // Generate an Operand for loading a field from an object.
    757 static inline Operand FieldOperand(Register object, int offset) {
    758   return Operand(object, offset - kHeapObjectTag);
    759 }
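
// Illustrative use (a sketch): load the map of the heap object in eax, e.g.
//   masm->mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));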
    760 
    761 
    762 // Generate an Operand for loading an indexed field from an object.
    763 static inline Operand FieldOperand(Register object,
    764                                    Register index,
    765                                    ScaleFactor scale,
    766                                    int offset) {
    767   return Operand(object, index, scale, offset - kHeapObjectTag);
    768 }
    769 
    770 
    771 static inline Operand ContextOperand(Register context, int index) {
    772   return Operand(context, Context::SlotOffset(index));
    773 }
    774 
    775 
    776 static inline Operand GlobalObjectOperand() {
    777   return ContextOperand(esi, Context::GLOBAL_INDEX);
    778 }
    779 
    780 
    781 // Generates an Operand for saving parameters after PrepareCallApiFunction.
    782 Operand ApiParameterOperand(int index);
    783 
    784 
    785 #ifdef GENERATED_CODE_COVERAGE
    786 extern void LogGeneratedCodeCoverage(const char* file_line);
    787 #define CODE_COVERAGE_STRINGIFY(x) #x
    788 #define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
    789 #define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
    790 #define ACCESS_MASM(masm) {                                               \
    791     byte* ia32_coverage_function =                                        \
    792         reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    793     masm->pushfd();                                                       \
    794     masm->pushad();                                                       \
    795     masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__)));         \
    796     masm->call(ia32_coverage_function, RelocInfo::RUNTIME_ENTRY);         \
    797     masm->pop(eax);                                                       \
    798     masm->popad();                                                        \
    799     masm->popfd();                                                        \
    800   }                                                                       \
    801   masm->
    802 #else
    803 #define ACCESS_MASM(masm) masm->
    804 #endif
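
// Code generators conventionally shorten ACCESS_MASM with a local macro, e.g.
//   #define __ ACCESS_MASM(masm)
//   __ mov(eax, Immediate(0));
//   #undef __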
    805 
    806 
    807 } }  // namespace v8::internal
    808 
    809 #endif  // V8_IA32_MACRO_ASSEMBLER_IA32_H_
    810