
/*--------------------------------------------------------------------*/
/*--- The core dispatch loop, for jumping to a code address.       ---*/
/*---                                      dispatch-amd64-darwin.S ---*/
/*--------------------------------------------------------------------*/

/*
  This file is part of Valgrind, a dynamic binary instrumentation
  framework.

  Copyright (C) 2000-2012 Julian Seward
     jseward@acm.org

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License as
  published by the Free Software Foundation; either version 2 of the
  License, or (at your option) any later version.

  This program is distributed in the hope that it will be useful, but
  WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
  02111-1307, USA.

  The GNU General Public License is contained in the file COPYING.
*/

#if defined(VGP_amd64_darwin)

#include "pub_core_basics_asm.h"
#include "pub_core_dispatch_asm.h"
#include "pub_core_transtab_asm.h"
#include "libvex_guest_offsets.h"   /* for OFFSET_amd64_RIP */


/*------------------------------------------------------------*/
/*---                                                      ---*/
/*--- The dispatch loop.  VG_(disp_run_translations) is    ---*/
/*--- used to run all translations,                        ---*/
/*--- including no-redir ones.                             ---*/
/*---                                                      ---*/
/*------------------------------------------------------------*/

/*----------------------------------------------------*/
/*--- Entry and preamble (set everything up)       ---*/
/*----------------------------------------------------*/

/* signature:
void VG_(disp_run_translations)( UWord* two_words,
                                 void*  guest_state,
                                 Addr   host_addr );
*/
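/* A caller-side sketch (hypothetical variable names; the real call
   sites live in the scheduler):

      UWord two_words[2];
      VG_(disp_run_translations)( two_words, &vex_guest_state,
                                  host_code_addr );
      // two_words[0] now holds a VG_TRC_* value; two_words[1] holds
      // an optional payload (see the postamble below).
*/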
.text
.globl VG_(disp_run_translations)
VG_(disp_run_translations):
        /* %rdi holds two_words    */
        /* %rsi holds guest_state  */
        /* %rdx holds host_addr    */

        /* The preamble */

        /* Save integer registers, since this is a pseudo-function. */
        pushq   %rax
        pushq   %rbx
        pushq   %rcx
        pushq   %rdx
        pushq   %rsi
        pushq   %rbp
        pushq   %r8
        pushq   %r9
        pushq   %r10
        pushq   %r11
        pushq   %r12
        pushq   %r13
        pushq   %r14
        pushq   %r15
        /* %rdi must be saved last */
        pushq   %rdi
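        /* ("Last" because remove_frame, below, pops %rdi first, to
           recover the two_words pointer before restoring the other
           registers.) */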

        /* Get the host CPU in the state expected by generated code. */

        /* set host FPU control word to the default mode expected
           by VEX-generated code.  See comments in libvex.h for
           more info. */
        finit
        pushq   $0x027F
        fldcw   (%rsp)
        addq    $8, %rsp
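        /* (0x027F decodes as: all x87 exceptions masked, 53-bit
           (double) precision, round-to-nearest.) */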

        /* set host SSE control word to the default mode expected
           by VEX-generated code. */
        pushq   $0x1F80
        ldmxcsr (%rsp)
        addq    $8, %rsp
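        /* (0x1F80 is the processor's reset-default %mxcsr: all SSE
           exceptions masked, status flags clear, round-to-nearest,
           FTZ/DAZ off.) */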

        /* set dir flag to known value */
        cld

        /* Set up the guest state pointer */
        movq    %rsi, %rbp
        /* and jump into the code cache.  Chained translations in
           the code cache run until, for whatever reason, they can't
           continue.  When that happens, the translation in question
           will jump (or call) to one of the continuation points
           VG_(cp_...) below. */
        jmpq    *%rdx
        /*NOTREACHED*/

/*----------------------------------------------------*/
/*--- Postamble and exit.                          ---*/
/*----------------------------------------------------*/

postamble:
        /* At this point, %rax and %rdx contain two
           words to be returned to the caller.  %rax
           holds a TRC value, and %rdx optionally may
           hold another word (for CHAIN_ME exits, the
           address of the place to patch). */
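        /* Viewed from C, the postamble's net effect is (a sketch):
              two_words[0] = trc;      // always meaningful
              two_words[1] = payload;  // patch address for CHAIN_ME
                                       // exits, 0 otherwise
        */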

        /* We're leaving.  Check that nobody messed with %mxcsr
           or %fpucw.  We can't mess with %rax or %rdx here as they
           hold the tentative return values, but any others are OK. */
#if !defined(ENABLE_INNER)
        /* This check fails for self-hosting, so skip in that case */
        pushq   $0
        fstcw   (%rsp)
        cmpl    $0x027F, (%rsp)
        popq    %r15 /* get rid of the word without trashing %rflags */
        jnz     invariant_violation
#endif
        pushq   $0
        stmxcsr (%rsp)
        andl    $0xFFFFFFC0, (%rsp)  /* mask out status flags */
        cmpl    $0x1F80, (%rsp)
        popq    %r15
        jnz     invariant_violation
        /* otherwise we're OK */
        jmp     remove_frame
invariant_violation:
        movq    $VG_TRC_INVARIANT_FAILED, %rax
        movq    $0, %rdx

remove_frame:
        /* Pop %rdi, stash return values */
        popq    %rdi
        movq    %rax, 0(%rdi)
        movq    %rdx, 8(%rdi)
        /* Now pop everything else */
        popq    %r15
        popq    %r14
        popq    %r13
        popq    %r12
        popq    %r11
        popq    %r10
        popq    %r9
        popq    %r8
        popq    %rbp
        popq    %rsi
        popq    %rdx
        popq    %rcx
        popq    %rbx
        popq    %rax
        ret

/*----------------------------------------------------*/
/*--- Continuation points                          ---*/
/*----------------------------------------------------*/

/* ------ Chain me to slow entry point ------ */
.globl VG_(disp_cp_chain_me_to_slowEP)
VG_(disp_cp_chain_me_to_slowEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_S, RA). */
        movq    $VG_TRC_CHAIN_ME_TO_SLOW_EP, %rax
        popq    %rdx
        /* 10 = movabsq $VG_(disp_chain_me_to_slowEP), %r11;
           3  = call *%r11 */
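        /* That is, the call site being backed over is 13 bytes,
           assuming the standard encodings:
              49 BB <imm64>    movabsq $..., %r11    (10 bytes)
              41 FF D3         call    *%r11         (3 bytes)   */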
        subq    $10+3, %rdx
        jmp     postamble

/* ------ Chain me to fast entry point ------ */
.globl VG_(disp_cp_chain_me_to_fastEP)
VG_(disp_cp_chain_me_to_fastEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_F, RA). */
        movq    $VG_TRC_CHAIN_ME_TO_FAST_EP, %rax
        popq    %rdx
        /* 10 = movabsq $VG_(disp_chain_me_to_fastEP), %r11;
           3  = call *%r11 */
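        /* (Same 13-byte call-site layout as the slowEP case above.) */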
        subq    $10+3, %rdx
        jmp     postamble

/* ------ Indirect but boring jump ------ */
.globl VG_(disp_cp_xindir)
VG_(disp_cp_xindir):
        /* Where are we going? */
        movq    OFFSET_amd64_RIP(%rbp), %rax

        /* stats only */
        movabsq $VG_(stats__n_xindirs_32), %r10
        addl    $1, (%r10)

        /* try a fast lookup in the translation cache */
        movabsq $VG_(tt_fast), %rcx
        movq    %rax, %rbx              /* next guest addr */
        andq    $VG_TT_FAST_MASK, %rbx  /* entry# */
        shlq    $4, %rbx                /* entry# * sizeof(FastCacheEntry) */
        movq    0(%rcx,%rbx,1), %r10    /* .guest */
        movq    8(%rcx,%rbx,1), %r11    /* .host */
        cmpq    %rax, %r10
        jnz     fast_lookup_failed

        /* Found a match.  Jump to .host. */
        jmp     *%r11
        ud2     /* persuade insn decoders not to speculate past here */
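        /* The lookup above, as a C sketch (a simplification, with the
           entry type inferred from the offsets used here; not the
           authoritative definition):

              typedef struct { Addr guest; Addr host; } FastCacheEntry;
              FastCacheEntry* e = &VG_(tt_fast)[ rip & VG_TT_FAST_MASK ];
              if (e->guest == rip)
                 goto *e->host;   // run the cached translation
              // else fall through to fast_lookup_failed
        */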

fast_lookup_failed:
        /* stats only */
        movabsq $VG_(stats__n_xindir_misses_32), %r10
        addl    $1, (%r10)

        movq    $VG_TRC_INNER_FASTMISS, %rax
        movq    $0, %rdx
        jmp     postamble

/* ------ Assisted jump ------ */
.globl VG_(disp_cp_xassisted)
VG_(disp_cp_xassisted):
        /* %rbp contains the TRC */
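        /* (The translation parks the TRC in %rbp just before jumping
           here, overwriting the guest-state pointer; that's harmless,
           since the postamble restores the caller's %rbp from the
           stack.) */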
        movq    %rbp, %rax
        movq    $0, %rdx
        jmp     postamble

/* ------ Event check failed ------ */
.globl VG_(disp_cp_evcheck_fail)
VG_(disp_cp_evcheck_fail):
        movq    $VG_TRC_INNER_COUNTERZERO, %rax
        movq    $0, %rdx
        jmp     postamble


#endif // defined(VGP_amd64_darwin)

/*--------------------------------------------------------------------*/
/*--- end                                                          ---*/
/*--------------------------------------------------------------------*/