
/*--------------------------------------------------------------------*/
/*--- The core dispatch loop, for jumping to a code address.       ---*/
/*---                                        dispatch-x86-darwin.S ---*/
/*--------------------------------------------------------------------*/

/*
  This file is part of Valgrind, a dynamic binary instrumentation
  framework.

  Copyright (C) 2000-2012 Julian Seward
     jseward@acm.org

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License as
  published by the Free Software Foundation; either version 2 of the
  License, or (at your option) any later version.

  This program is distributed in the hope that it will be useful, but
  WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
  02111-1307, USA.

  The GNU General Public License is contained in the file COPYING.
*/

#if defined(VGP_x86_darwin)

#include "pub_core_basics_asm.h"
#include "pub_core_dispatch_asm.h"
#include "pub_core_transtab_asm.h"
#include "libvex_guest_offsets.h"	/* for OFFSET_x86_EIP */


/*------------------------------------------------------------*/
/*---                                                      ---*/
/*--- The dispatch loop.  VG_(disp_run_translations) is    ---*/
/*--- used to run all translations,                        ---*/
/*--- including no-redir ones.                             ---*/
/*---                                                      ---*/
/*------------------------------------------------------------*/

/*----------------------------------------------------*/
/*--- Entry and preamble (set everything up)       ---*/
/*----------------------------------------------------*/

/* signature:
void VG_(disp_run_translations)( UWord* two_words,
                                 void*  guest_state,
                                 Addr   host_addr );
*/
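
/* A hedged sketch, in C, of how a caller is expected to use this
   entry point (variable names below are invented for the sketch;
   the real caller lives in the scheduler):

       UWord two_words[2];
       VG_(disp_run_translations)( two_words, guest_state, host_addr );
       // two_words[0] then holds a VG_TRC_* code describing why the
       // translation stopped; two_words[1] optionally holds a second
       // word (the patch address, for the CHAIN_ME exits).
*/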
.text
.globl VG_(disp_run_translations)
VG_(disp_run_translations):
        /* 0(%esp) holds our return address. */
	/* 4(%esp) holds two_words */
	/* 8(%esp) holds guest_state */
	/* 12(%esp) holds host_addr */

        /* The preamble */

        /* Save integer registers, since this is a pseudo-function. */
        pushl   %eax
	pushl	%ebx
	pushl	%ecx
	pushl	%edx
	pushl	%esi
	pushl	%edi
	pushl	%ebp

	/* 28+4(%esp) holds two_words */
	/* 28+8(%esp) holds guest_state */
	/* 28+12(%esp) holds host_addr */
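	/* (28 = 7 registers pushed just above x 4 bytes each) */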

        /* Get the host CPU in the state expected by generated code. */

	/* set host FPU control word to the default mode expected
           by VEX-generated code.  See comments in libvex.h for
           more info. */
	finit
	pushl	$0x027F
	fldcw	(%esp)
	addl	$4, %esp
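	/* (0x027F: all x87 exceptions masked, 53-bit precision,
	   round-to-nearest -- the same value the invariant check in
	   the postamble below expects to find.) */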

	/* set host SSE control word to the default mode expected
	   by VEX-generated code. */
	cmpl	$0, VG_(machine_x86_have_mxcsr)
	jz	L1
	pushl	$0x1F80
	ldmxcsr	(%esp)
	addl	$4, %esp
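	/* (0x1F80: all SSE exceptions masked, round-to-nearest,
	   FZ/DAZ off -- the MXCSR power-on default.) */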
L1:
	/* set dir flag to known value */
	cld

	/* Set up the guest state pointer */
	movl	28+8(%esp), %ebp

        /* and jump into the code cache.  Chained translations in
           the code cache run until, for whatever reason, they can't
           continue.  When that happens, the translation in question
           will jump (or call) to one of the continuation points
           VG_(disp_cp_...) below. */
        jmpl    *28+12(%esp)
	/*NOTREACHED*/

/*----------------------------------------------------*/
/*--- Postamble and exit.                          ---*/
/*----------------------------------------------------*/

postamble:
        /* At this point, %eax and %edx contain two
           words to be returned to the caller.  %eax
           holds a TRC value, and %edx optionally may
           hold another word (for CHAIN_ME exits, the
           address of the place to patch.) */
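        /* In C terms the exit path below is roughly (a sketch;
           'trc' and 'extra' are just names for %eax and %edx):
               two_words[0] = trc;
               two_words[1] = extra;
               return;
        */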

	/* We're leaving.  Check that nobody messed with %mxcsr
           or %fpucw.  We can't mess with %eax or %edx here as they
	   hold the tentative return values, but any others are OK. */
#if !defined(ENABLE_INNER)
        /* This check fails for self-hosting, so skip in that case */
	pushl	$0
	fstcw	(%esp)
	cmpl	$0x027F, (%esp)
	popl	%esi /* get rid of the word without trashing %eflags */
	jnz	invariant_violation
#endif
	cmpl	$0, VG_(machine_x86_have_mxcsr)
	jz	L2
	pushl	$0
	stmxcsr	(%esp)
	andl	$0xFFFFFFC0, (%esp)  /* mask out status flags */
	cmpl	$0x1F80, (%esp)
	popl	%esi
	jnz	invariant_violation
L2:	/* otherwise we're OK */
	jmp	remove_frame
invariant_violation:
	movl	$VG_TRC_INVARIANT_FAILED, %eax
        movl    $0, %edx

remove_frame:
        /* Stash return values */
        movl    28+4(%esp), %edi        /* two_words */
        movl    %eax, 0(%edi)
        movl    %edx, 4(%edi)
        /* Restore int regs and return. */
	popl	%ebp
	popl	%edi
	popl	%esi
	popl	%edx
	popl	%ecx
	popl	%ebx
	popl	%eax
	ret

/*----------------------------------------------------*/
/*--- Continuation points                          ---*/
/*----------------------------------------------------*/

/* ------ Chain me to slow entry point ------ */
.globl VG_(disp_cp_chain_me_to_slowEP)
VG_(disp_cp_chain_me_to_slowEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_S, RA). */
        movl    $VG_TRC_CHAIN_ME_TO_SLOW_EP, %eax
        popl    %edx
        /* 5 = movl $VG_(disp_chain_me_to_slowEP), %edx;
           2 = call *%edx */
        subl    $5+2, %edx
        jmp     postamble

/* ------ Chain me to fast entry point ------ */
.globl VG_(disp_cp_chain_me_to_fastEP)
VG_(disp_cp_chain_me_to_fastEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_F, RA). */
        movl    $VG_TRC_CHAIN_ME_TO_FAST_EP, %eax
        popl    %edx
        /* 5 = movl $VG_(disp_chain_me_to_fastEP), %edx;
           2 = call *%edx */
        subl    $5+2, %edx
        jmp     postamble
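
/* For both chain-me entry points above, the unpatched call site inside
   the translation is 7 bytes (byte encodings shown for illustration;
   the authoritative emitter is on the VEX side):

       BA xx xx xx xx      movl  $<chain-me entry point>, %edx
       FF D2               call  *%edx

   so RA - (5+2), computed above, is the address of the first byte to
   overwrite when the site is later patched to jump directly to the
   target translation. */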

/* ------ Indirect but boring jump ------ */
.globl VG_(disp_cp_xindir)
VG_(disp_cp_xindir):
	/* Where are we going? */
	movl	OFFSET_x86_EIP(%ebp), %eax

        /* stats only */
        addl    $1, VG_(stats__n_xindirs_32)

        /* try a fast lookup in the translation cache */
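        /* In C terms the lookup below is roughly (a sketch; the
           struct and field names are illustrative, the real
           declarations live in the transtab headers):

               struct { Addr guest; Addr host; } *fast = VG_(tt_fast);
               UWord slot = eip & VG_TT_FAST_MASK;
               if (fast[slot].guest == eip)
                  goto *fast[slot].host;        // jmp *%edi
               else
                  goto fast_lookup_failed;
        */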
        movl    %eax, %ebx                      /* next guest addr */
        andl    $VG_TT_FAST_MASK, %ebx          /* entry# */
        movl    0+VG_(tt_fast)(,%ebx,8), %esi   /* .guest */
        movl    4+VG_(tt_fast)(,%ebx,8), %edi   /* .host */
        cmpl    %eax, %esi
        jnz     fast_lookup_failed

        /* Found a match.  Jump to .host. */
	jmp 	*%edi
	ud2	/* persuade insn decoders not to speculate past here */

fast_lookup_failed:
        /* stats only */
        addl    $1, VG_(stats__n_xindir_misses_32)

	movl	$VG_TRC_INNER_FASTMISS, %eax
        movl    $0, %edx
	jmp	postamble

/* ------ Assisted jump ------ */
.globl VG_(disp_cp_xassisted)
VG_(disp_cp_xassisted):
        /* %ebp contains the TRC */
        movl    %ebp, %eax
        movl    $0, %edx
        jmp     postamble
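        /* (The translation puts the TRC into %ebp -- the guest-state
           register -- before jumping here; clobbering it is fine
           since the guest state pointer is not needed once we are
           exiting the dispatcher.) */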

/* ------ Event check failed ------ */
.globl VG_(disp_cp_evcheck_fail)
VG_(disp_cp_evcheck_fail):
	movl	$VG_TRC_INNER_COUNTERZERO, %eax
        movl    $0, %edx
	jmp	postamble
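        /* (Reached when the event check at a translation's entry point
           finds the thread's event counter has hit zero, presumably so
           the scheduler can regain control and reschedule.) */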


#endif // defined(VGP_x86_darwin)

/*--------------------------------------------------------------------*/
/*--- end                                                          ---*/
/*--------------------------------------------------------------------*/