#include "../common/asm-constants.h"
#include "../common/mips-defines.h"
#include <asm/regdef.h>
#include <asm/fpregdef.h>

#ifdef __mips_hard_float
#define HARD_FLOAT
#else
#define SOFT_FLOAT
#endif

#if (__mips==32) && (__mips_isa_rev>=2)
#define MIPS32R2
#endif

/* MIPS definitions and declarations

   reg	nick		purpose
   s0	rPC		interpreted program counter, used for fetching instructions
   s1	rFP		interpreted frame pointer, used for accessing locals and args
   s2	rSELF		self (Thread) pointer
   s3	rIBASE		interpreted instruction base pointer, used for computed goto
   s4	rINST		first 16-bit code unit of current instruction
*/


/* single-purpose registers, given names for clarity */
#define rPC s0
#define rFP s1
#define rSELF s2
#define rIBASE s3
#define rINST s4
#define rOBJ s5
#define rBIX s6
#define rTEMP s7

/* rARG0/rARG1 (and rRESULT0/rRESULT1) name the low and high words of a 64-bit
value passed or returned in a register pair. On little-endian targets the o32
ABI places the low word in the lower-numbered register (a0/v0); on big-endian
targets the pair is swapped, so the mappings below reverse a0/a1 and v0/v1.
See the illustrative example after these defines. */

#ifdef HAVE_LITTLE_ENDIAN
#define rARG0 a0
#define rARG1 a1
#define rARG2 a2
#define rARG3 a3
#define rRESULT0 v0
#define rRESULT1 v1
#else
#define rARG0 a1
#define rARG1 a0
#define rARG2 a3
#define rARG3 a2
#define rRESULT0 v1
#define rRESULT1 v0
#endif
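
/*
 * Illustrative sketch (comment only, not assembled): passing the 64-bit value
 * 0x1122334455667788 to a helper through rARG0/rARG1. The same macro names
 * yield ABI-correct registers on either endianness:
 *
 *     li    rARG0, 0x55667788      # low word  (a0 on LE, a1 on BE)
 *     li    rARG1, 0x11223344      # high word (a1 on LE, a0 on BE)
 *     JAL(some_long_helper)        # hypothetical helper taking one long
 */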


/* save/restore the PC and/or FP from the Thread struct pointed to by rSELF */
#define LOAD_PC_FROM_SELF() lw rPC, offThread_pc(rSELF)
#define SAVE_PC_TO_SELF() sw rPC, offThread_pc(rSELF)
#define LOAD_FP_FROM_SELF() lw rFP, offThread_curFrame(rSELF)
#define SAVE_FP_TO_SELF() sw rFP, offThread_curFrame(rSELF)
#define LOAD_PC_FP_FROM_SELF() \
	LOAD_PC_FROM_SELF();   \
	LOAD_FP_FROM_SELF()
#define SAVE_PC_FP_TO_SELF()   \
	SAVE_PC_TO_SELF();     \
	SAVE_FP_TO_SELF()


/* Export the interpreted PC into the current frame's StackSaveArea (must be
   done before anything that can throw). */
#define EXPORT_PC() \
    sw        rPC, (offStackSaveArea_currentPc - sizeofStackSaveArea)(rFP)

/* Compute the address of a frame's StackSaveArea from its frame pointer. */
#define SAVEAREA_FROM_FP(rd, _fpreg) \
    subu      rd, _fpreg, sizeofStackSaveArea

/* Fetch the current instruction's first 16-bit code unit into rINST. */
#define FETCH_INST() lhu rINST, (rPC)

/* Fetch the code unit _count units ahead and advance rPC to it. */
#define FETCH_ADVANCE_INST(_count) lhu rINST, ((_count)*2)(rPC); \
    addu      rPC, rPC, ((_count) * 2)
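
/*
 * Illustrative expansion (comment only): FETCH_ADVANCE_INST(1) becomes
 *
 *     lhu   rINST, 2(rPC)
 *     addu  rPC, rPC, 2
 *
 * i.e. load the next instruction's first code unit and step rPC past the
 * current one-unit instruction.
 */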

/* Fetch the code unit _count units ahead of _sreg into _dreg and advance _sreg. */
#define PREFETCH_ADVANCE_INST(_dreg, _sreg, _count) \
    lhu       _dreg, ((_count)*2)(_sreg) ;            \
    addu      _sreg, _sreg, (_count)*2

/* Advance rPC by the byte offset in rd, then fetch the instruction there. */
#define FETCH_ADVANCE_INST_RB(rd) addu rPC, rPC, rd; \
    lhu       rINST, (rPC)

/* Fetch the code unit _count units ahead of rPC (FETCH zero-extends, FETCH_S
   sign-extends). */
#define FETCH(rd, _count) lhu rd, ((_count) * 2)(rPC)
#define FETCH_S(rd, _count) lh rd, ((_count) * 2)(rPC)

/* Fetch one byte of a code unit: FETCH_B reads the low-order byte, FETCH_C the
   high-order byte, regardless of endianness. */
#ifdef HAVE_LITTLE_ENDIAN

#define FETCH_B(rd, _count) lbu rd, ((_count) * 2)(rPC)
#define FETCH_C(rd, _count) lbu rd, ((_count) * 2 + 1)(rPC)

#else

#define FETCH_B(rd, _count) lbu rd, ((_count) * 2 + 1)(rPC)
#define FETCH_C(rd, _count) lbu rd, ((_count) * 2)(rPC)

#endif
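
/*
 * Illustrative example (comment only): for a format 22b instruction the "CC"
 * byte in the second code unit could be fetched with
 *
 *     FETCH_C(a1, 1)        # expands to "lbu a1, 3(rPC)" on LE builds
 *                           # and to     "lbu a1, 2(rPC)" on BE builds
 *
 * so callers never need to care about byte order.
 */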

/* Extract the opcode (low byte) of the current instruction in rINST. */
#define GET_INST_OPCODE(rd) and rd, rINST, 0xFF

/*
 * Put the prefetched instruction's opcode field into the specified register.
 */

#define GET_PREFETCHED_OPCODE(dreg, sreg)   andi     dreg, sreg, 255

/* Jump to the handler for the opcode in rd, computed relative to rIBASE
   (or to an explicit handler base). */
#define GOTO_OPCODE(rd) sll rd, rd, ${handler_size_bits}; \
    addu      rd, rIBASE, rd; \
    jr        rd

#define GOTO_OPCODE_BASE(_base, rd)  sll rd, rd, ${handler_size_bits}; \
    addu      rd, _base, rd; \
    jr        rd
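
/*
 * Illustrative dispatch sketch (comment only): a handler typically ends with
 *
 *     FETCH_ADVANCE_INST(1)             # rINST = next code unit, rPC += 2
 *     GET_INST_OPCODE(t0)               # t0 = opcode byte
 *     GOTO_OPCODE(t0)                   # jump to rIBASE + (t0 << ${handler_size_bits})
 *
 * which is the computed-goto dispatch built from the macros above.
 */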

/* Get/set the 32-bit value of a Dalvik virtual register (index in rix). */
#define GET_VREG(rd, rix) LOAD_eas2(rd, rFP, rix)

#define GET_VREG_F(rd, rix) EAS2(AT, rFP, rix); \
    .set noat; l.s rd, (AT); .set at

#define SET_VREG(rd, rix) STORE_eas2(rd, rFP, rix)

/* Set a virtual register and dispatch to the next handler; the sw executes in
   the jr delay slot (hence .set noreorder). */
#define SET_VREG_GOTO(rd, rix, dst) .set noreorder; \
    sll       dst, dst, ${handler_size_bits}; \
    addu      dst, rIBASE, dst; \
    sll       t8, rix, 2; \
    addu      t8, t8, rFP; \
    jr        dst; \
    sw        rd, 0(t8); \
    .set reorder
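
/*
 * Illustrative usage (comment only): a "move vA, vB" style handler might do
 *
 *     GET_OPB(a1)                        # a1 = B field (source register index)
 *     GET_OPA4(a0)                       # a0 = low 4 bits of A (dest index)
 *     GET_VREG(a2, a1)                   # a2 = vB
 *     FETCH_ADVANCE_INST(1)
 *     GET_INST_OPCODE(t0)
 *     SET_VREG_GOTO(a2, a0, t0)          # vA = a2, then dispatch
 *
 * Register choices here are arbitrary; the real handlers live in the
 * per-opcode sources, not in this header.
 */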

#define SET_VREG_F(rd, rix) EAS2(AT, rFP, rix); \
    .set noat; s.s rd, (AT); .set at


/* Extract the "A" operand field (bits 8-15 of rINST), the low 4 bits of "A",
   or the "B" field (bits 12-15). */
#define GET_OPA(rd) srl rd, rINST, 8
#ifndef MIPS32R2
#define GET_OPA4(rd) GET_OPA(rd); and rd, 0xf
#else
#define GET_OPA4(rd) ext rd, rINST, 8, 4
#endif
#define GET_OPB(rd) srl rd, rINST, 12

/* Load a field of the Thread struct, given the offThread_* suffix. */
#define LOAD_rSELF_OFF(rd, off) lw rd, offThread_##off(rSELF)

#define LOAD_rSELF_method(rd) LOAD_rSELF_OFF(rd, method)
#define LOAD_rSELF_methodClassDex(rd) LOAD_rSELF_OFF(rd, methodClassDex)
#define LOAD_rSELF_interpStackEnd(rd) LOAD_rSELF_OFF(rd, interpStackEnd)
#define LOAD_rSELF_retval(rd) LOAD_rSELF_OFF(rd, retval)
#define LOAD_rSELF_pActiveProfilers(rd) LOAD_rSELF_OFF(rd, pActiveProfilers)
#define LOAD_rSELF_bailPtr(rd) LOAD_rSELF_OFF(rd, bailPtr)
#define LOAD_rSELF_SelfSuspendCount(rd) LOAD_rSELF_OFF(rd, SelfSuspendCount)


/*
 * Form an Effective Address rd = rbase + roff<<n;
 * Uses reg AT
 */
#define EASN(rd, rbase, roff, rshift) .set noat; \
    sll       AT, roff, rshift; \
    addu      rd, rbase, AT; \
    .set at

#define EAS1(rd, rbase, roff) EASN(rd, rbase, roff, 1)
#define EAS2(rd, rbase, roff) EASN(rd, rbase, roff, 2)
#define EAS3(rd, rbase, roff) EASN(rd, rbase, roff, 3)
#define EAS4(rd, rbase, roff) EASN(rd, rbase, roff, 4)
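
/*
 * Illustrative expansion (comment only): EAS2(t0, rFP, a1) becomes
 *
 *     .set noat
 *     sll   AT, a1, 2
 *     addu  t0, rFP, AT
 *     .set at
 *
 * i.e. t0 = &fp[N] when a1 holds a virtual-register index N.
 */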

/*
 * Form an Effective Shift Right rd = rbase + roff>>n;
 * Uses reg AT
 */
#define ESRN(rd, rbase, roff, rshift) .set noat; \
    srl       AT, roff, rshift; \
    addu      rd, rbase, AT; \
    .set at

#define LOAD_eas2(rd, rbase, roff) EAS2(AT, rbase, roff); \
    .set noat; lw rd, 0(AT); .set at

#define STORE_eas2(rd, rbase, roff) EAS2(AT, rbase, roff); \
    .set noat; sw rd, 0(AT); .set at

#define LOAD_RB_OFF(rd, rbase, off) lw rd, off(rbase)
#define LOADu2_RB_OFF(rd, rbase, off) lhu rd, off(rbase)
#define STORE_RB_OFF(rd, rbase, off) sw rd, off(rbase)

/* Load/store a 64-bit value held as a (rlo, rhi) register pair. The memory
   layout of the two words follows the target's endianness, so rlo/rhi always
   name the numerically low/high halves. */
#ifdef HAVE_LITTLE_ENDIAN

#define STORE64_off(rlo, rhi, rbase, off) sw rlo, off(rbase); \
    sw        rhi, (off+4)(rbase)
#define LOAD64_off(rlo, rhi, rbase, off) lw rlo, off(rbase); \
    lw        rhi, (off+4)(rbase)

#define vSTORE64_off(rlo, rhi, rbase, off) sw rlo, off(rbase); \
    sw        rhi, (off+4)(rbase)
#define vLOAD64_off(rlo, rhi, rbase, off) lw rlo, off(rbase); \
    lw        rhi, (off+4)(rbase)

#define STORE64_off_F(rlo, rhi, rbase, off) s.s rlo, off(rbase); \
    s.s       rhi, (off+4)(rbase)
#define LOAD64_off_F(rlo, rhi, rbase, off) l.s rlo, off(rbase); \
    l.s       rhi, (off+4)(rbase)
#else

#define STORE64_off(rlo, rhi, rbase, off) sw rlo, (off+4)(rbase); \
    sw        rhi, (off)(rbase)
#define LOAD64_off(rlo, rhi, rbase, off) lw rlo, (off+4)(rbase); \
    lw        rhi, (off)(rbase)
#define vSTORE64_off(rlo, rhi, rbase, off) sw rlo, (off+4)(rbase); \
    sw        rhi, (off)(rbase)
#define vLOAD64_off(rlo, rhi, rbase, off) lw rlo, (off+4)(rbase); \
    lw        rhi, (off)(rbase)
#define STORE64_off_F(rlo, rhi, rbase, off) s.s rlo, (off+4)(rbase); \
    s.s       rhi, (off)(rbase)
#define LOAD64_off_F(rlo, rhi, rbase, off) l.s rlo, (off+4)(rbase); \
    l.s       rhi, (off)(rbase)
#endif

#define STORE64(rlo, rhi, rbase) STORE64_off(rlo, rhi, rbase, 0)
#define LOAD64(rlo, rhi, rbase) LOAD64_off(rlo, rhi, rbase, 0)

#define vSTORE64(rlo, rhi, rbase) vSTORE64_off(rlo, rhi, rbase, 0)
#define vLOAD64(rlo, rhi, rbase) vLOAD64_off(rlo, rhi, rbase, 0)

#define STORE64_F(rlo, rhi, rbase) STORE64_off_F(rlo, rhi, rbase, 0)
#define LOAD64_F(rlo, rhi, rbase) LOAD64_off_F(rlo, rhi, rbase, 0)
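
/*
 * Illustrative expansion (comment only): LOAD64(rARG0, rARG1, a3) becomes
 *
 *     lw   a0, 0(a3); lw   a1, 4(a3)     # little-endian build
 *     lw   a1, 4(a3); lw   a0, 0(a3)     # big-endian build
 *
 * so the long's low word always lands in rARG0.
 */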

#define STORE64_lo(rd, rbase) sw rd, 0(rbase)
#define STORE64_hi(rd, rbase) sw rd, 4(rbase)


#define LOAD_offThread_exception(rd, rbase) LOAD_RB_OFF(rd, rbase, offThread_exception)
#define LOAD_base_offArrayObject_length(rd, rbase) LOAD_RB_OFF(rd, rbase, offArrayObject_length)
#define LOAD_base_offClassObject_accessFlags(rd, rbase) LOAD_RB_OFF(rd, rbase, offClassObject_accessFlags)
#define LOAD_base_offClassObject_descriptor(rd, rbase) LOAD_RB_OFF(rd, rbase, offClassObject_descriptor)
#define LOAD_base_offClassObject_super(rd, rbase) LOAD_RB_OFF(rd, rbase, offClassObject_super)

#define LOAD_base_offClassObject_vtable(rd, rbase) LOAD_RB_OFF(rd, rbase, offClassObject_vtable)
#define LOAD_base_offClassObject_vtableCount(rd, rbase) LOAD_RB_OFF(rd, rbase, offClassObject_vtableCount)
#define LOAD_base_offDvmDex_pResClasses(rd, rbase) LOAD_RB_OFF(rd, rbase, offDvmDex_pResClasses)
#define LOAD_base_offDvmDex_pResFields(rd, rbase) LOAD_RB_OFF(rd, rbase, offDvmDex_pResFields)

#define LOAD_base_offDvmDex_pResMethods(rd, rbase) LOAD_RB_OFF(rd, rbase, offDvmDex_pResMethods)
#define LOAD_base_offDvmDex_pResStrings(rd, rbase) LOAD_RB_OFF(rd, rbase, offDvmDex_pResStrings)
#define LOAD_base_offInstField_byteOffset(rd, rbase) LOAD_RB_OFF(rd, rbase, offInstField_byteOffset)
#define LOAD_base_offStaticField_value(rd, rbase) LOAD_RB_OFF(rd, rbase, offStaticField_value)
#define LOAD_base_offMethod_clazz(rd, rbase) LOAD_RB_OFF(rd, rbase, offMethod_clazz)

#define LOAD_base_offMethod_name(rd, rbase) LOAD_RB_OFF(rd, rbase, offMethod_name)
#define LOAD_base_offObject_clazz(rd, rbase) LOAD_RB_OFF(rd, rbase, offObject_clazz)

#define LOADu2_offMethod_methodIndex(rd, rbase) LOADu2_RB_OFF(rd, rbase, offMethod_methodIndex)


#define STORE_offThread_exception(rd, rbase) STORE_RB_OFF(rd, rbase, offThread_exception)


#define STACK_STORE(rd, off) sw rd, off(sp)
#define STACK_LOAD(rd, off) lw rd, off(sp)
#define CREATE_STACK(n) subu sp, sp, n
#define DELETE_STACK(n) addu sp, sp, n

#define SAVE_RA(offset) STACK_STORE(ra, offset)
#define LOAD_RA(offset) STACK_LOAD(ra, offset)

#define LOAD_ADDR(dest, addr) la dest, addr
#define LOAD_IMM(dest, imm) li dest, imm
#define MOVE_REG(dest, src) move dest, src
#define RETURN jr ra
#define STACK_SIZE 128

#define STACK_OFFSET_ARG04 16
#define STACK_OFFSET_ARG05 20
#define STACK_OFFSET_ARG06 24
#define STACK_OFFSET_ARG07 28
#define STACK_OFFSET_SCR   32
#define STACK_OFFSET_SCRMX 80
#define STACK_OFFSET_GP    84
#define STACK_OFFSET_rFP   112
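
/*
 * Sketch of the 128-byte frame implied by the offsets above and by the
 * STACK_STORE_FULL and STACK_LOAD_FULL macros below (for reference only):
 *
 *     sp+0..15    outgoing a0-a3 home slots (o32 argument save area)
 *     sp+16..31   outgoing args 4-7 (STACK_OFFSET_ARG04..07)
 *     sp+32..79   scratch area (STACK_OFFSET_SCR up to STACK_OFFSET_SCRMX)
 *     sp+84       gp (STACK_OFFSET_GP)
 *     sp+88..116  s7..s0, saved in that order (s1 slot = STACK_OFFSET_rFP)
 *     sp+120      fp
 *     sp+124      ra
 */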

#define JAL(n) jal n
#define BAL(n) bal n

#define STACK_STORE_RA() CREATE_STACK(STACK_SIZE); \
    STACK_STORE(gp, STACK_OFFSET_GP); \
    STACK_STORE(ra, 124)

#define STACK_STORE_S0() STACK_STORE_RA(); \
    STACK_STORE(s0, 116)

#define STACK_STORE_S0S1() STACK_STORE_S0(); \
    STACK_STORE(s1, STACK_OFFSET_rFP)

#define STACK_LOAD_RA() STACK_LOAD(ra, 124); \
    STACK_LOAD(gp, STACK_OFFSET_GP); \
    DELETE_STACK(STACK_SIZE)

#define STACK_LOAD_S0() STACK_LOAD(s0, 116); \
    STACK_LOAD_RA()

#define STACK_LOAD_S0S1() STACK_LOAD(s1, STACK_OFFSET_rFP); \
    STACK_LOAD_S0()

#define STACK_STORE_FULL() CREATE_STACK(STACK_SIZE); \
    STACK_STORE(ra, 124); \
    STACK_STORE(fp, 120); \
    STACK_STORE(s0, 116); \
    STACK_STORE(s1, STACK_OFFSET_rFP); \
    STACK_STORE(s2, 108); \
    STACK_STORE(s3, 104); \
    STACK_STORE(s4, 100); \
    STACK_STORE(s5, 96); \
    STACK_STORE(s6, 92); \
    STACK_STORE(s7, 88);

#define STACK_LOAD_FULL() STACK_LOAD(gp, STACK_OFFSET_GP); \
    STACK_LOAD(s7, 88); \
    STACK_LOAD(s6, 92); \
    STACK_LOAD(s5, 96); \
    STACK_LOAD(s4, 100); \
    STACK_LOAD(s3, 104); \
    STACK_LOAD(s2, 108); \
    STACK_LOAD(s1, STACK_OFFSET_rFP); \
    STACK_LOAD(s0, 116); \
    STACK_LOAD(fp, 120); \
    STACK_LOAD(ra, 124); \
    DELETE_STACK(STACK_SIZE)
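
/*
 * Illustrative usage (comment only): an entry point that clobbers every
 * callee-saved register can bracket its body with
 *
 *     STACK_STORE_FULL()        # allocate the 128-byte frame, spill s0-s7/fp/ra
 *     ...                       # body; may overwrite rPC/rFP/rSELF etc.
 *     STACK_LOAD_FULL()         # reload the saved registers, free the frame
 *     RETURN
 *
 * Note that STACK_LOAD_FULL() also reloads gp from STACK_OFFSET_GP, so gp must
 * have been stored there (STACK_STORE_FULL() itself does not do so).
 */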

/*
 * Scratch slots within the frame. The first 8 words (offsets 0-31) are
 * reserved for outgoing call arguments; scratch storage starts at
 * STACK_OFFSET_SCR, and off must stay below STACK_OFFSET_SCRMX-STACK_OFFSET_SCR.
 */
#define SCRATCH_STORE(r,off) \
    STACK_STORE(r, STACK_OFFSET_SCR+off);
#define SCRATCH_LOAD(r,off) \
    STACK_LOAD(r, STACK_OFFSET_SCR+off);
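
/*
 * Illustrative usage (comment only): caller-saved values can be parked in the
 * scratch area across a helper call, e.g.
 *
 *     SCRATCH_STORE(a0, 0)      # sw a0, 32(sp)
 *     SCRATCH_STORE(a1, 4)      # sw a1, 36(sp)
 *     JAL(some_helper)          # hypothetical call that clobbers a0/a1
 *     SCRATCH_LOAD(a1, 4)
 *     SCRATCH_LOAD(a0, 0)
 */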

#if defined(WITH_JIT)
#include "../common/jit-config.h"
#endif