Home | History | Annotate | Download | only in arm64
      1 %def header():
      2 /*
      3  * Copyright (C) 2016 The Android Open Source Project
      4  *
      5  * Licensed under the Apache License, Version 2.0 (the "License");
      6  * you may not use this file except in compliance with the License.
      7  * You may obtain a copy of the License at
      8  *
      9  *      http://www.apache.org/licenses/LICENSE-2.0
     10  *
     11  * Unless required by applicable law or agreed to in writing, software
     12  * distributed under the License is distributed on an "AS IS" BASIS,
     13  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     14  * See the License for the specific language governing permissions and
     15  * limitations under the License.
     16  */
     17 
     18 /*
     19   Art assembly interpreter notes:
     20 
  First validate assembly code by implementing ExecuteXXXImpl() style body (doesn't
  handle invoke, allows higher-level code to create frame & shadow frame).

  Once that's working, support direct entry code & eliminate shadow frame (and
  excess locals allocation).
     27   Some (hopefully) temporary ugliness.  We'll treat xFP as pointing to the
     28   base of the vreg array within the shadow frame.  Access the other fields,
     29   dex_pc_, method_ and number_of_vregs_ via negative offsets.  For now, we'll continue
     30   the shadow frame mechanism of double-storing object references - via xFP &
     31   number_of_vregs_.
     32 
     33  */
     34 
     35 /*
     36 ARM64 Runtime register usage conventions.
     37 
     38   r0     : w0 is 32-bit return register and x0 is 64-bit.
     39   r0-r7  : Argument registers.
     40   r8-r15 : Caller save registers (used as temporary registers).
     41   r16-r17: Also known as ip0-ip1, respectively. Used as scratch registers by
     42            the linker, by the trampolines and other stubs (the backend uses
     43            these as temporary registers).
     44   r18    : Caller save register (used as temporary register).
     45   r19    : Pointer to thread-local storage.
     46   r20-r29: Callee save registers.
     47   r30    : (lr) is reserved (the link register).
     48   rsp    : (sp) is reserved (the stack pointer).
     49   rzr    : (zr) is reserved (the zero register).
     50 
     51   Floating-point registers
     52   v0-v31
     53 
     54   v0     : s0 is return register for singles (32-bit) and d0 for doubles (64-bit).
     55            This is analogous to the C/C++ (hard-float) calling convention.
     56   v0-v7  : Floating-point argument registers in both Dalvik and C/C++ conventions.
     57            Also used as temporary and codegen scratch registers.
     58 
     59   v0-v7 and v16-v31 : trashed across C calls.
     60   v8-v15 : bottom 64-bits preserved across C calls (d8-d15 are preserved).
     61 
     62   v16-v31: Used as codegen temp/scratch.
     63   v8-v15 : Can be used for promotion.
     64 
     65   Must maintain 16-byte stack alignment.
     66 
     67 Mterp notes:
     68 
     69 The following registers have fixed assignments:
     70 
     71   reg nick      purpose
     72   x20  xPC       interpreted program counter, used for fetching instructions
     73   x21  xFP       interpreted frame pointer, used for accessing locals and args
     74   x22  xSELF     self (Thread) pointer
     75   x23  xINST     first 16-bit code unit of current instruction
     76   x24  xIBASE    interpreted instruction base pointer, used for computed goto
     77   x25  xREFS     base of object references in shadow frame  (ideally, we'll get rid of this later).
     78   x26  wPROFILE  jit profile hotness countdown
     79   x16  ip        scratch reg
     80   x17  ip2       scratch reg (used by macros)
     81 
     82 Macros are provided for common operations.  They MUST NOT alter unspecified registers or condition
     83 codes.
     84 */
     85 
     86 /*
     87  * This is a #include, not a %include, because we want the C pre-processor
     88  * to expand the macros into assembler assignment statements.
     89  */
     90 #include "asm_support.h"
     91 #include "interpreter/cfi_asm_support.h"
     92 
#define MTERP_PROFILE_BRANCHES 1
#define MTERP_LOGGING 0

/* During bringup, we'll use the shadow frame model instead of xFP */
/* single-purpose registers, given names for clarity */
#define xPC      x20  // interpreted program counter (direct pointer into dex code units).
#define CFI_DEX  20 // DWARF register number of the register holding dex-pc (xPC).
#define CFI_TMP  0  // DWARF register number of the first argument register (r0).
#define xFP      x21  // interpreted frame pointer: base of the vreg array in the shadow frame.
#define xSELF    x22  // Thread* self.
#define xINST    x23  // first 16-bit code unit of current instruction (64-bit view).
#define wINST    w23  // first 16-bit code unit of current instruction (32-bit view).
#define xIBASE   x24  // interpreted instruction base pointer, used for computed goto.
#define xREFS    x25  // base of the object-reference array in the shadow frame.
#define wPROFILE w26  // jit profile hotness countdown (32-bit view).
#define xPROFILE x26  // jit profile hotness countdown (64-bit view).
#define ip       x16  // scratch register (IP0).
#define ip2      x17  // scratch register used by macros (IP1).
    111 
/*
 * Instead of holding a pointer to the shadow frame, we keep xFP at the base of the vregs.  So,
 * to access other shadow frame fields, we need to use a backwards offset.  Define those here.
 * OFF_FP(a) converts a ShadowFrame-relative offset into an xFP-relative (negative) offset.
 */
#define OFF_FP(a) (a - SHADOWFRAME_VREGS_OFFSET)
#define OFF_FP_NUMBER_OF_VREGS OFF_FP(SHADOWFRAME_NUMBER_OF_VREGS_OFFSET)
#define OFF_FP_DEX_PC OFF_FP(SHADOWFRAME_DEX_PC_OFFSET)
#define OFF_FP_LINK OFF_FP(SHADOWFRAME_LINK_OFFSET)
#define OFF_FP_METHOD OFF_FP(SHADOWFRAME_METHOD_OFFSET)
#define OFF_FP_RESULT_REGISTER OFF_FP(SHADOWFRAME_RESULT_REGISTER_OFFSET)
#define OFF_FP_DEX_PC_PTR OFF_FP(SHADOWFRAME_DEX_PC_PTR_OFFSET)
#define OFF_FP_DEX_INSTRUCTIONS OFF_FP(SHADOWFRAME_DEX_INSTRUCTIONS_OFFSET)
#define OFF_FP_SHADOWFRAME OFF_FP(0)  // offset from xFP back to the ShadowFrame base itself.
    125 
/*
 * "export" the PC to dex_pc field in the shadow frame, f/b/o future exception objects.  Must
 * be done *before* something throws.
 *
 * It's okay to do this more than once.
 *
 * NOTE: the fast interpreter keeps track of dex pc as a direct pointer to the mapped
 * dex byte codes.  However, the rest of the runtime expects dex pc to be an instruction
 * offset into the code_items_[] array.  For efficiency, we will "export" the
 * current dex pc as a direct pointer using the EXPORT_PC macro, and rely on GetDexPC
 * to convert to a dex pc when needed.
 */
.macro EXPORT_PC
    str  xPC, [xFP, #OFF_FP_DEX_PC_PTR]
.endm
    141 
/*
 * Fetch the next instruction from xPC into wINST.  Does not advance xPC.
 */
.macro FETCH_INST
    ldrh    wINST, [xPC]                // load one 16-bit code unit; upper bits of wINST cleared.
.endm
    148 
/*
 * Fetch the next instruction from the specified offset.  Advances xPC
 * to point to the next instruction.  "_count" is in 16-bit code units.
 *
 * Because of the limited size of immediate constants on ARM, this is only
 * suitable for small forward movements (i.e. don't try to implement "goto"
 * with this).
 *
 * This must come AFTER anything that can throw an exception, or the
 * exception catch may miss.  (This also implies that it must come after
 * EXPORT_PC.)
 */
.macro FETCH_ADVANCE_INST count
    ldrh    wINST, [xPC, #((\count)*2)]!   // pre-indexed with writeback: advance xPC and load in one step.
.endm
    164 
/*
 * The operation performed here is similar to FETCH_ADVANCE_INST, except the
 * src and dest registers are parameterized (not hard-wired to xPC and xINST).
 * \sreg is updated (pre-index writeback), \dreg receives the code unit.
 */
.macro PREFETCH_ADVANCE_INST dreg, sreg, count
    ldrh    \dreg, [\sreg, #((\count)*2)]!
.endm
    172 
/*
 * Similar to FETCH_ADVANCE_INST, but does not update xPC.  Used to load
 * xINST ahead of possible exception point.  Be sure to manually advance xPC
 * later.
 */
.macro PREFETCH_INST count
    ldrh    wINST, [xPC, #((\count)*2)]    // no writeback: xPC unchanged.
.endm

/* Advance xPC by some number of code units. */
.macro ADVANCE count
  add  xPC, xPC, #((\count)*2)             // code units are 2 bytes each.
.endm
    186 
/*
 * Fetch the next instruction from an offset specified by _reg and advance
 * xPC to point to the next instruction.  "_reg" must specify the distance
 * in bytes, *not* 16-bit code units, and may be a signed value.  Must not set flags.
 *
 */
.macro FETCH_ADVANCE_INST_RB reg
    add     xPC, xPC, \reg, sxtw        // sign-extend the 32-bit byte offset (backward branches are negative).
    ldrh    wINST, [xPC]
.endm
    197 
/*
 * Fetch a half-word code unit from an offset past the current PC.  The
 * "_count" value is in 16-bit code units.  Does not advance xPC.
 *
 * The "_S" variant works the same but treats the value as signed
 * (sign-extending the 16-bit code unit into \reg).
 */
.macro FETCH reg, count
    ldrh    \reg, [xPC, #((\count)*2)]
.endm

.macro FETCH_S reg, count
    ldrsh   \reg, [xPC, #((\count)*2)]
.endm
    211 
/*
 * Fetch one byte from an offset past the current PC.  Pass in the same
 * "_count" as you would for FETCH, and an additional 0/1 indicating which
 * byte of the halfword you want (lo/hi).  Zero-extends into \reg.
 */
.macro FETCH_B reg, count, byte
    ldrb     \reg, [xPC, #((\count)*2+(\byte))]
.endm
    220 
/*
 * Put the instruction's opcode field (low 8 bits of the code unit) into the
 * specified register.
 */
.macro GET_INST_OPCODE reg
    and     \reg, xINST, #255
.endm

/*
 * Put the prefetched instruction's opcode field into the specified register.
 */
.macro GET_PREFETCHED_OPCODE oreg, ireg
    and     \oreg, \ireg, #255
.endm
    234 
/*
 * Begin executing the opcode in _reg.  Clobbers reg.
 * Each handler is a fixed-size slot, so the target is
 * base + (opcode << handler_size_bits).
 */

.macro GOTO_OPCODE reg
    add     \reg, xIBASE, \reg, lsl #${handler_size_bits}
    br      \reg
.endm
.macro GOTO_OPCODE_BASE base,reg
    add     \reg, \base, \reg, lsl #${handler_size_bits}
    br      \reg
.endm
    247 
/*
 * Get/set the 32-bit value from a Dalvik register.
 * Setters also maintain the parallel reference array at xREFS:
 * non-object stores clear the matching reference slot.
 */
.macro GET_VREG reg, vreg
    ldr     \reg, [xFP, \vreg, uxtw #2]
.endm
.macro SET_VREG reg, vreg
    str     \reg, [xFP, \vreg, uxtw #2]
    str     wzr, [xREFS, \vreg, uxtw #2]   // value is not an object: clear the reference slot.
.endm
.macro SET_VREG_OBJECT reg, vreg, tmpreg
    // Double-store the reference in both the vreg array and the reference array.
    // \tmpreg is unused here; kept so existing call sites remain valid.
    str     \reg, [xFP, \vreg, uxtw #2]
    str     \reg, [xREFS, \vreg, uxtw #2]
.endm
.macro SET_VREG_FLOAT reg, vreg
    str     \reg, [xFP, \vreg, uxtw #2]
    str     wzr, [xREFS, \vreg, uxtw #2]   // float is not an object: clear the reference slot.
.endm
    266 
/*
 * Get/set the 64-bit value from a Dalvik register.
 * The vreg index is scaled by 4 (a wide value occupies two adjacent 32-bit
 * slots), which the 64-bit load/store register-offset form cannot encode,
 * so the address is formed explicitly.  Clobbers ip2.
 */
.macro GET_VREG_WIDE reg, vreg
    add     ip2, xFP, \vreg, lsl #2
    ldr     \reg, [ip2]
.endm
.macro SET_VREG_WIDE reg, vreg
    add     ip2, xFP, \vreg, lsl #2
    str     \reg, [ip2]
    add     ip2, xREFS, \vreg, lsl #2
    str     xzr, [ip2]                  // clear both reference slots (64-bit zero store).
.endm
.macro GET_VREG_DOUBLE reg, vreg
    add     ip2, xFP, \vreg, lsl #2
    ldr     \reg, [ip2]
.endm
.macro SET_VREG_DOUBLE reg, vreg
    add     ip2, xFP, \vreg, lsl #2
    str     \reg, [ip2]
    add     ip2, xREFS, \vreg, lsl #2
    str     xzr, [ip2]                  // clear both reference slots (64-bit zero store).
.endm
    290 
/*
 * Get the 32-bit value from a Dalvik register and sign-extend to 64-bit.
 * Used to avoid an extra instruction in int-to-long.
 */
.macro GET_VREG_S reg, vreg
    ldrsw   \reg, [xFP, \vreg, uxtw #2]
.endm

/*
 * Convert a virtual register index into an address.
 */
.macro VREG_INDEX_TO_ADDR reg, vreg
    add     \reg, xFP, \vreg, lsl #2   /* WARNING: handle shadow frame vreg zero if store */
.endm
    305 
/*
 * Refresh handler table, reloading xIBASE from the thread.  Needed after any
 * point where the runtime may have switched the active handler table.
 */
.macro REFRESH_IBASE
  ldr     xIBASE, [xSELF, #THREAD_CURRENT_IBASE_OFFSET]
.endm
    312 
/*
 * Save two registers to the stack at the given offset, recording their
 * locations for the unwinder via CFI.
 */
.macro SAVE_TWO_REGS reg1, reg2, offset
    stp \reg1, \reg2, [sp, #(\offset)]
    .cfi_rel_offset \reg1, (\offset)
    .cfi_rel_offset \reg2, (\offset) + 8
.endm

/*
 * Restore two registers from the stack, clearing their CFI save locations.
 */
.macro RESTORE_TWO_REGS reg1, reg2, offset
    ldp \reg1, \reg2, [sp, #(\offset)]
    .cfi_restore \reg1
    .cfi_restore \reg2
.endm

/*
 * Increase frame size and save two registers to the bottom of the stack.
 * \frame_adjustment must keep sp 16-byte aligned.
 */
.macro SAVE_TWO_REGS_INCREASE_FRAME reg1, reg2, frame_adjustment
    stp \reg1, \reg2, [sp, #-(\frame_adjustment)]!
    .cfi_adjust_cfa_offset (\frame_adjustment)
    .cfi_rel_offset \reg1, 0
    .cfi_rel_offset \reg2, 8
.endm

/*
 * Restore two registers from the bottom of the stack and decrease frame size.
 */
.macro RESTORE_TWO_REGS_DECREASE_FRAME reg1, reg2, frame_adjustment
    ldp \reg1, \reg2, [sp], #(\frame_adjustment)
    .cfi_restore \reg1
    .cfi_restore \reg2
    .cfi_adjust_cfa_offset -(\frame_adjustment)
.endm
    350 
/*
 * function support macros: declare/close an assembly function symbol.
 */
.macro ENTRY name
    .type \name, #function
    .hidden \name  // Hide this as a global symbol, so we do not incur plt calls.
    .global \name
    /* Cache alignment for function entry */
    .balign 16
\name:
.endm

.macro END name
    .size \name, .-\name               // record symbol size for tools/unwinders.
.endm
    366 
// Macro to unpoison (negate) the reference for heap poisoning.
// No-op unless the build defines USE_HEAP_POISONING.
.macro UNPOISON_HEAP_REF rRef
#ifdef USE_HEAP_POISONING
    neg \rRef, \rRef
#endif  // USE_HEAP_POISONING
.endm
    373 
%def entry():
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

    .text

/*
 * Interpreter entry point.
 * On entry:
 *  x0  Thread* self
 *  x1  insns_
 *  x2  ShadowFrame
 *  x3  JValue* result_register
 *
 */
ENTRY ExecuteMterpImpl
    .cfi_startproc
    // Build an 80-byte frame and save the callee-saved registers this
    // function repurposes (x20-x27, fp, lr).
    SAVE_TWO_REGS_INCREASE_FRAME xPROFILE, x27, 80
    SAVE_TWO_REGS                xIBASE, xREFS, 16
    SAVE_TWO_REGS                xSELF, xINST, 32
    SAVE_TWO_REGS                xPC, xFP, 48
    SAVE_TWO_REGS                fp, lr, 64
    add     fp, sp, #64

    /* Remember the return register */
    str     x3, [x2, #SHADOWFRAME_RESULT_REGISTER_OFFSET]

    /* Remember the dex instruction pointer */
    str     x1, [x2, #SHADOWFRAME_DEX_INSTRUCTIONS_OFFSET]

    /* set up "named" registers */
    mov     xSELF, x0
    ldr     w0, [x2, #SHADOWFRAME_NUMBER_OF_VREGS_OFFSET]
    add     xFP, x2, #SHADOWFRAME_VREGS_OFFSET     // point to vregs.
    add     xREFS, xFP, w0, lsl #2                 // point to reference array in shadow frame
    ldr     w0, [x2, #SHADOWFRAME_DEX_PC_OFFSET]   // Get starting dex_pc.
    add     xPC, x1, w0, lsl #1                    // Create direct pointer to 1st dex opcode
    CFI_DEFINE_DEX_PC_WITH_OFFSET(CFI_TMP, CFI_DEX, 0)
    EXPORT_PC

    /* Starting ibase */
    ldr     xIBASE, [xSELF, #THREAD_CURRENT_IBASE_OFFSET]

    /* Set up for backwards branches & osr profiling */
    ldr     x0, [xFP, #OFF_FP_METHOD]
    add     x1, xFP, #OFF_FP_SHADOWFRAME
    mov     x2, xSELF
    bl      MterpSetUpHotnessCountdown
    mov     wPROFILE, w0                // Starting hotness countdown to xPROFILE

    /* start executing the instruction at rPC */
    FETCH_INST                          // load wINST from rPC
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction
    /* NOTE: no fallthrough */
    // cfi info continues, and covers the whole mterp implementation.
    END ExecuteMterpImpl
    444 
%def dchecks_before_helper():
    // Call C++ to do debug checks and return to the handler using tail call.
    // lr still holds the handler's return address from opcode_pre's bl, so
    // the plain branch below returns control directly to the handler.
    .extern MterpCheckBefore
    mov    x0, xSELF
    add    x1, xFP, #OFF_FP_SHADOWFRAME
    mov    x2, xPC
    b      MterpCheckBefore     // (self, shadow_frame, dex_pc_ptr) Note: tail call.
    452 
%def opcode_pre():
%  add_helper(dchecks_before_helper, "mterp_dchecks_before_helper")
    #if !defined(NDEBUG)
    bl     mterp_dchecks_before_helper  // debug builds only: run pre-instruction checks.
    #endif
    458 
%def footer():
    .cfi_endproc                        // close the CFI region opened in ExecuteMterpImpl.
    END MterpHelpers
    462 
%def fallback():
/* Transfer stub to alternate interpreter */
    b    MterpFallback

    467 
%def helpers():
    ENTRY MterpHelpers
/*
 * ===========================================================================
 *  Common subroutines and data
 * ===========================================================================
 * Out-of-line routines shared by the opcode handlers (error paths, exception
 * dispatch, branch profiling, suspend checks, and function exit).
 */
    475 
/*
 * We've detected a condition that will result in an exception, but the exception
 * has not yet been thrown.  Just bail out to the reference interpreter to deal with it.
 * TUNING: for consistency, we may want to just go ahead and handle these here.
 * All of these export the PC first so the runtime can attribute the exception
 * to the correct dex instruction.
 */
common_errDivideByZero:
    EXPORT_PC
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    bl MterpLogDivideByZeroException
#endif
    b MterpCommonFallback

common_errArrayIndex:
    EXPORT_PC
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    bl MterpLogArrayIndexException
#endif
    b MterpCommonFallback

common_errNegativeArraySize:
    EXPORT_PC
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    bl MterpLogNegativeArraySizeException
#endif
    b MterpCommonFallback

common_errNoSuchMethod:
    EXPORT_PC
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    bl MterpLogNoSuchMethodException
#endif
    b MterpCommonFallback

common_errNullObject:
    EXPORT_PC
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    bl MterpLogNullObjectException
#endif
    b MterpCommonFallback

common_exceptionThrown:
    EXPORT_PC
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    bl MterpLogExceptionThrownException
#endif
    b MterpCommonFallback
    534 
// Suspend-check failure path: bail out to the reference interpreter,
// optionally logging the thread flags that triggered the fallback.
MterpSuspendFallback:
    EXPORT_PC
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    ldr  x2, [xSELF, #THREAD_FLAGS_OFFSET]
    bl MterpLogSuspendFallback
#endif
    b MterpCommonFallback
    544 
/*
 * If we're here, something is out of the ordinary.  If there is a pending
 * exception, handle it.  Otherwise, roll back and retry with the reference
 * interpreter.
 */
MterpPossibleException:
    ldr     x0, [xSELF, #THREAD_EXCEPTION_OFFSET]
    cbz     x0, MterpFallback                       // If not, fall back to reference interpreter.
    /* intentional fallthrough - handle pending exception. */
/*
 * On return from a runtime helper routine, we've found a pending exception.
 * Can we handle it here - or need to bail out to caller?
 *
 */
MterpException:
    mov     x0, xSELF
    add     x1, xFP, #OFF_FP_SHADOWFRAME
    bl      MterpHandleException                    // (self, shadow_frame)
    cbz     w0, MterpExceptionReturn                // no local catch, back to caller.
    // A local catch handler was found: rebuild xPC from the shadow frame's
    // (updated) dex_pc and resume interpreting at the catch block.
    ldr     x0, [xFP, #OFF_FP_DEX_INSTRUCTIONS]
    ldr     w1, [xFP, #OFF_FP_DEX_PC]
    ldr     xIBASE, [xSELF, #THREAD_CURRENT_IBASE_OFFSET]
    add     xPC, x0, x1, lsl #1                     // generate new dex_pc_ptr
    /* Do we need to switch interpreters? */
    ldr     w0, [xSELF, #THREAD_USE_MTERP_OFFSET]
    cbz     w0, MterpFallback
    /* resume execution at catch block */
    EXPORT_PC
    FETCH_INST
    GET_INST_OPCODE ip
    GOTO_OPCODE ip
    /* NOTE: no fallthrough */
/*
 * Common handling for branches with support for Jit profiling.
 * On entry:
 *    wINST          <= signed offset
 *    wPROFILE       <= signed hotness countdown (expanded to 32 bits)
 *    condition bits <= set to establish sign of offset (use "NoFlags" entry if not)
 *
 * We have quite a few different cases for branch profiling, OSR detection and
 * suspend check support here.
 *
 * Taken backward branches:
 *    If profiling active, do hotness countdown and report if we hit zero.
 *    If in osr check mode, see if our target is a compiled loop header entry and do OSR if so.
 *    Is there a pending suspend request?  If so, suspend.
 *
 * Taken forward branches and not-taken backward branches:
 *    If in osr check mode, see if our target is a compiled loop header entry and do OSR if so.
 *
 * Our most common case is expected to be a taken backward branch with active jit profiling,
 * but no full OSR check and no pending suspend request.
 * Next most common case is not-taken branch with no full OSR check.
 *
 */
MterpCommonTakenBranchNoFlags:
    cmp     wINST, #0
    b.gt    .L_forward_branch           // don't add forward branches to hotness
    tbnz    wPROFILE, #31, .L_no_count_backwards  // go if negative (profiling disabled/special state)
    subs    wPROFILE, wPROFILE, #1      // countdown
    b.eq    .L_add_batch                // counted down to zero - report
.L_resume_backward_branch:
    ldr     lr, [xSELF, #THREAD_FLAGS_OFFSET]   // lr used as a scratch here; no call is pending.
    add     w2, wINST, wINST            // w2<- byte offset
    FETCH_ADVANCE_INST_RB w2            // update rPC, load wINST
    REFRESH_IBASE
    ands    lr, lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    b.ne    .L_suspend_request_pending
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction

.L_suspend_request_pending:
    EXPORT_PC
    mov     x0, xSELF
    bl      MterpSuspendCheck           // (self)
    cbnz    x0, MterpFallback
    REFRESH_IBASE                       // might have changed during suspend
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction

.L_no_count_backwards:
    cmp     wPROFILE, #JIT_CHECK_OSR    // possible OSR re-entry?
    b.ne    .L_resume_backward_branch
    mov     x0, xSELF
    add     x1, xFP, #OFF_FP_SHADOWFRAME
    mov     x2, xINST
    EXPORT_PC
    bl      MterpMaybeDoOnStackReplacement  // (self, shadow_frame, offset)
    cbnz    x0, MterpOnStackReplacement
    b       .L_resume_backward_branch

.L_forward_branch:
    cmp     wPROFILE, #JIT_CHECK_OSR    // possible OSR re-entry?
    b.eq    .L_check_osr_forward
.L_resume_forward_branch:
    add     w2, wINST, wINST            // w2<- byte offset
    FETCH_ADVANCE_INST_RB w2            // update rPC, load wINST
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction

.L_check_osr_forward:
    mov     x0, xSELF
    add     x1, xFP, #OFF_FP_SHADOWFRAME
    mov     x2, xINST
    EXPORT_PC
    bl      MterpMaybeDoOnStackReplacement  // (self, shadow_frame, offset)
    cbnz    x0, MterpOnStackReplacement
    b       .L_resume_forward_branch

.L_add_batch:
    // Hotness countdown hit zero: flush the cached count to the runtime and
    // pick up a fresh countdown value.
    add     x1, xFP, #OFF_FP_SHADOWFRAME
    strh    wPROFILE, [x1, #SHADOWFRAME_HOTNESS_COUNTDOWN_OFFSET]
    ldr     x0, [xFP, #OFF_FP_METHOD]
    mov     x2, xSELF
    bl      MterpAddHotnessBatch        // (method, shadow_frame, self)
    mov     wPROFILE, w0                // restore new hotness countdown to wPROFILE
    b       .L_no_count_backwards
    662 
/*
 * Entered from the conditional branch handlers when OSR check request active on
 * not-taken path.  All Dalvik not-taken conditional branch offsets are 2.
 */
.L_check_not_taken_osr:
    mov     x0, xSELF
    add     x1, xFP, #OFF_FP_SHADOWFRAME
    mov     x2, #2                      // fixed offset for the not-taken path.
    EXPORT_PC
    bl      MterpMaybeDoOnStackReplacement  // (self, shadow_frame, offset)
    cbnz    x0, MterpOnStackReplacement
    FETCH_ADVANCE_INST 2
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction
    677 
/*
 * Check for suspend check request.  Assumes wINST already loaded, xPC advanced,
 * and the thread-flags word loaded into w7 by the caller; still needs to get
 * the opcode and branch to it.
 */
MterpCheckSuspendAndContinue:
    ldr     xIBASE, [xSELF, #THREAD_CURRENT_IBASE_OFFSET]  // refresh xIBASE
    ands    w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    b.ne    check1                      // suspend or checkpoint requested?
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction
check1:
    EXPORT_PC
    mov     x0, xSELF
    bl      MterpSuspendCheck           // (self)
    cbnz    x0, MterpFallback           // Something in the environment changed, switch interpreters
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction
    695 
/*
 * On-stack replacement has happened, and now we've returned from the compiled method.
 */
MterpOnStackReplacement:
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    sxtw x2, wINST
    bl MterpLogOSR
#endif
    mov  x0, #1                         // Signal normal return
    b    MterpDone

/*
 * Bail out to reference interpreter.
 */
MterpFallback:
    EXPORT_PC
#if MTERP_LOGGING
    mov  x0, xSELF
    add  x1, xFP, #OFF_FP_SHADOWFRAME
    bl MterpLogFallback
#endif
MterpCommonFallback:
    mov     x0, #0                                  // signal retry with reference interpreter.
    b       MterpDone
    722 
/*
 * We pushed some registers on the stack in ExecuteMterpImpl, then saved
 * SP and LR.  Here we restore SP, restore the registers, and then restore
 * LR to PC.
 *
 * On entry:
 *  uint32_t* xFP  (should still be live, pointer to base of vregs)
 *  x0 carries the return value: 0 = retry with reference interpreter, 1 = done.
 */
MterpExceptionReturn:
    mov     x0, #1                                  // signal return to caller.
    b MterpDone
MterpReturn:
    ldr     x2, [xFP, #OFF_FP_RESULT_REGISTER]
    str     x0, [x2]                                // store result into caller's JValue.
    mov     x0, #1                                  // signal return to caller.
MterpDone:
/*
 * At this point, we expect wPROFILE to be non-zero.  If negative, hotness is disabled or we're
 * checking for OSR.  If greater than zero, we might have unreported hotness to register
 * (the difference between the ending wPROFILE and the cached hotness counter).  wPROFILE
 * should only reach zero immediately after a hotness decrement, and is then reset to either
 * a negative special state or the new non-zero countdown value.
 */
    cmp     wPROFILE, #0
    bgt     MterpProfileActive                      // if > 0, we may have some counts to report.
    .cfi_remember_state
    // Unwind the 80-byte frame built in ExecuteMterpImpl (reverse order).
    RESTORE_TWO_REGS                fp, lr, 64
    RESTORE_TWO_REGS                xPC, xFP, 48
    RESTORE_TWO_REGS                xSELF, xINST, 32
    RESTORE_TWO_REGS                xIBASE, xREFS, 16
    RESTORE_TWO_REGS_DECREASE_FRAME xPROFILE, x27, 80
    ret
    .cfi_restore_state                              // Reset unwind info so following code unwinds.
    .cfi_def_cfa_offset 80                          // workaround for clang bug: 31975598

MterpProfileActive:
    mov     xINST, x0                               // stash return value
    /* Report cached hotness counts */
    ldr     x0, [xFP, #OFF_FP_METHOD]
    add     x1, xFP, #OFF_FP_SHADOWFRAME
    mov     x2, xSELF
    strh    wPROFILE, [x1, #SHADOWFRAME_HOTNESS_COUNTDOWN_OFFSET]
    bl      MterpAddHotnessBatch                    // (method, shadow_frame, self)
    mov     x0, xINST                               // restore return value
    RESTORE_TWO_REGS                fp, lr, 64
    RESTORE_TWO_REGS                xPC, xFP, 48
    RESTORE_TWO_REGS                xSELF, xINST, 32
    RESTORE_TWO_REGS                xIBASE, xREFS, 16
    RESTORE_TWO_REGS_DECREASE_FRAME xPROFILE, x27, 80
    ret
    773 
    774 
%def instruction_end():

    // Marks the end of the opcode handler table; paired with
    // artMterpAsmInstructionStart for size/range computation by the runtime.
    .type artMterpAsmInstructionEnd, #object
    .hidden artMterpAsmInstructionEnd
    .global artMterpAsmInstructionEnd
artMterpAsmInstructionEnd:
    781 
%def instruction_start():

    // Marks the start of the opcode handler table: aliased to the first
    // handler (.L_op_nop).
    .type artMterpAsmInstructionStart, #object
    .hidden artMterpAsmInstructionStart
    .global artMterpAsmInstructionStart
artMterpAsmInstructionStart = .L_op_nop
    .text
    789 
%def opcode_start():
    // Emitted at the top of every generated opcode handler.
    ENTRY mterp_${opcode}
%def opcode_end():
    // Emitted at the bottom of every generated opcode handler.
    END mterp_${opcode}
%def helper_start(name):
    ENTRY ${name}
%def helper_end(name):
    END ${name}
    798