/* ART quick entrypoints for 32-bit ARM. */
      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #include "asm_support_arm.S"
     18 
     19 #include "arch/quick_alloc_entrypoints.S"
     20 
     21     /* Deliver the given exception */
     22     .extern artDeliverExceptionFromCode
     23     /* Deliver an exception pending on a thread */
     24     .extern artDeliverPendingException
     25 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     * Frame layout (growing down): r4-r11 + lr (36 bytes), s0-s31 (128 bytes),
     * then 12 bytes of space whose bottom word will hold the Method*.
     * Total must equal FRAME_SIZE_SAVE_ALL_CALLEE_SAVE (checked below).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    push {r4-r11, lr} @ 9 words of callee saves
    .save {r4-r11, lr}
    .cfi_adjust_cfa_offset 36
    .cfi_rel_offset r4, 0
    .cfi_rel_offset r5, 4
    .cfi_rel_offset r6, 8
    .cfi_rel_offset r7, 12
    .cfi_rel_offset r8, 16
    .cfi_rel_offset r9, 20
    .cfi_rel_offset r10, 24
    .cfi_rel_offset r11, 28
    .cfi_rel_offset lr, 32
    vpush {s0-s31}    @ 32 single-precision FP registers, 128 bytes
    .pad #128
    .cfi_adjust_cfa_offset 128
    sub sp, #12       @ 3 words of space, bottom word will hold Method*
    .pad #12
    .cfi_adjust_cfa_offset 12

     // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 36 + 128 + 12)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM) size not as expected."
#endif
.endm
     55 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     * Saves the non-argument callee saves (r5-r8, r10-r11, lr) plus one word
     * whose bottom will hold the Method*; total 32 bytes (checked below).
     */
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    push {r5-r8, r10-r11, lr} @ 7 words of callee saves
    .save {r5-r8, r10-r11, lr}
    .cfi_adjust_cfa_offset 28
    .cfi_rel_offset r5, 0
    .cfi_rel_offset r6, 4
    .cfi_rel_offset r7, 8
    .cfi_rel_offset r8, 12
    .cfi_rel_offset r10, 16
    .cfi_rel_offset r11, 20
    .cfi_rel_offset lr, 24
    sub sp, #4                @ bottom word will hold Method*
    .pad #4
    .cfi_adjust_cfa_offset 4

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 28 + 4)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM) size not as expected."
#endif
.endm
     80 
     81 .macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
     82     add sp, #4               @ bottom word holds Method*
     83     pop {r5-r8, r10-r11, lr} @ 7 words of callee saves
     84     .cfi_restore r5
     85     .cfi_restore r6
     86     .cfi_restore r7
     87     .cfi_restore r8
     88     .cfi_restore r10
     89     .cfi_restore r11
     90     .cfi_adjust_cfa_offset -32
     91 .endm
     92 
     93 .macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
     94     add sp, #4               @ bottom word holds Method*
     95     pop {r5-r8, r10-r11, lr} @ 7 words of callee saves
     96     .cfi_restore r5
     97     .cfi_restore r6
     98     .cfi_restore r7
     99     .cfi_restore r8
    100     .cfi_restore r10
    101     .cfi_restore r11
    102     .cfi_adjust_cfa_offset -32
    103     bx  lr                   @ return
    104 .endm
    105 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     * Saves the argument registers r1-r3 alongside the callee saves so the
     * arguments survive calls into the runtime; the bottom word of the extra
     * 8 bytes will hold the Method*.
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    push {r1-r3, r5-r8, r10-r11, lr}  @ 10 words of callee saves
    .save {r1-r3, r5-r8, r10-r11, lr}
    .cfi_adjust_cfa_offset 40         @ must precede the rel_offsets below:
                                      @ .cfi_rel_offset is relative to the
                                      @ current CFA (matches SETUP_SAVE_ALL)
    .cfi_rel_offset r1, 0
    .cfi_rel_offset r2, 4
    .cfi_rel_offset r3, 8
    .cfi_rel_offset r5, 12
    .cfi_rel_offset r6, 16
    .cfi_rel_offset r7, 20
    .cfi_rel_offset r8, 24
    .cfi_rel_offset r10, 28
    .cfi_rel_offset r11, 32
    .cfi_rel_offset lr, 36
    sub sp, #8                        @ 2 words of space, bottom word will hold Method*
    .pad #8
    .cfi_adjust_cfa_offset 8

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 40 + 8)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM) size not as expected."
#endif
.endm
    133 
    /*
     * Macro that tears down the kRefsAndArgs callee save frame, restoring the
     * argument registers r1-r3 as well as the callee saves.
     */
.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    add  sp, #8                      @ rewind sp past Method* slot and padding
    pop {r1-r3, r5-r8, r10-r11, lr}  @ 10 words of callee saves
    .cfi_restore r1
    .cfi_restore r2
    .cfi_restore r3
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_restore lr                  @ lr is reloaded by the pop above (was missing)
    .cfi_adjust_cfa_offset -48
.endm
    148 
    /*
     * Return to the caller (via lr) if r0 is zero; otherwise fall through.
     * Typically used after a runtime call that returns 0 on success.
     */
.macro RETURN_IF_RESULT_IS_ZERO
    cbnz   r0, 1f              @ result non-zero branch over
    bx     lr                  @ return
1:
.endm
    154 
    /*
     * Return to the caller (via lr) if r0 is non-zero; otherwise fall through.
     * Typically used after a runtime call that returns a non-null result on success.
     */
.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz    r0, 1f              @ result zero branch over
    bx     lr                  @ return
1:
.endm
    160 
    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_. Does not return.
     * The .fnend/.fnstart pair closes the current ARM EHABI unwind region and
     * opens a fresh one for the throw frame built below.
     */
.macro DELIVER_PENDING_EXCEPTION
    .fnend
    .fnstart
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME           @ save callee saves for throw
    mov    r0, r9                              @ pass Thread::Current
    mov    r1, sp                              @ pass SP
    b      artDeliverPendingExceptionFromCode  @ artDeliverPendingExceptionFromCode(Thread*, SP)
.endm
    173 
    /*
     * Macro defining a no-argument throw entrypoint: builds the kSaveAll frame
     * and tail-calls \cxx_name(Thread*, SP), which does not return.
     */
.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r0, r9                      @ pass Thread::Current
    mov r1, sp                      @ pass SP
    b   \cxx_name                   @ \cxx_name(Thread*, SP)
    bkpt                            @ unreached (consistent with ONE/TWO_ARG variants)
END \c_name
.endm
    183 
    /*
     * Macro defining a one-argument throw entrypoint: the argument stays in r0,
     * Thread* and SP go in r1/r2, then tail-call \cxx_name, which does not return.
     */
.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r1, r9                      @ pass Thread::Current
    mov r2, sp                      @ pass SP
    b   \cxx_name                   @ \cxx_name(Thread*, SP)
    bkpt                            @ unreached
END \c_name
.endm
    194 
    /*
     * Macro defining a two-argument throw entrypoint: the arguments stay in r0/r1,
     * Thread* and SP go in r2/r3, then tail-call \cxx_name, which does not return.
     */
.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r2, r9                      @ pass Thread::Current
    mov r3, sp                      @ pass SP
    b   \cxx_name                   @ \cxx_name(Thread*, SP)
    bkpt                            @ unreached
END \c_name
.endm
    205 
    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     * Arg1 holds the method index.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode
    237 
    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
     * the method_idx.  This wrapper will save arg1-arg3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is first visible argument of the target, and so can be found in arg1/r1.
     *
     * The helper will attempt to locate the target and return a 64-bit result in r0/r1 consisting
     * of the target Method* in r0 and method->code_ in r1.
     *
     * If unsuccessful, the helper will return NULL/NULL. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  @ save callee saves in case allocation triggers GC
    ldr    r2, [sp, #48]                  @ pass caller Method* (48 = kRefsAndArgs frame size)
    mov    r3, r9                         @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!               @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     \cxx_name                      @ (method_idx, this, caller, Thread*, SP)
    add    sp, #16                        @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    mov    r12, r1                        @ save Method*->code_
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    cbz    r0, 1f                         @ did we find the target? if not go to exception delivery
    bx     r12                            @ tail call to target
1:
    DELIVER_PENDING_EXCEPTION
END \c_name
.endm
    275 
@ Instantiate the invocation trampolines for each invoke kind.
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck
    283 
    /*
     * Quick invocation stub.
     * On entry:
     *   r0 = method pointer
     *   r1 = argument array or NULL for no argument methods
     *   r2 = size of argument array in bytes
     *   r3 = (managed) thread pointer
     *   [sp] = JValue* result
     *   [sp + 4] = shorty
     * Copies the argument array below a NULL Method* slot, loads r1-r3 from the
     * first three argument words, calls the method's quick code, then writes
     * the r0/r1 result pair through the JValue* result pointer.
     */
ENTRY art_quick_invoke_stub
    push   {r0, r4, r5, r9, r11, lr}       @ spill regs
    .save  {r0, r4, r5, r9, r11, lr}
    .pad #24
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset r0, 0
    .cfi_rel_offset r4, 4
    .cfi_rel_offset r5, 8
    .cfi_rel_offset r9, 12
    .cfi_rel_offset r11, 16
    .cfi_rel_offset lr, 20
    mov    r11, sp                         @ save the stack pointer
    .cfi_def_cfa_register r11
    mov    r9, r3                          @ move managed thread pointer into r9
#ifdef ARM_R4_SUSPEND_FLAG
    mov    r4, #SUSPEND_CHECK_INTERVAL     @ reset r4 to suspend check interval
#endif
    add    r5, r2, #4                      @ create space for method pointer in frame

    sub    r5, sp, r5                      @ reserve & align *stack* to 16 bytes: native calling
    and    r5, #0xFFFFFFF0                 @ convention only aligns to 8B, so we have to ensure ART
    mov    sp, r5                          @ 16B alignment ourselves.

    add    r0, sp, #4                      @ pass stack pointer + method ptr as dest for memcpy
    bl     memcpy                          @ memcpy (dest, src, bytes)
    ldr    r0, [r11]                       @ restore method*
    ldr    r1, [sp, #4]                    @ copy arg value for r1
    ldr    r2, [sp, #8]                    @ copy arg value for r2
    ldr    r3, [sp, #12]                   @ copy arg value for r3
    mov    ip, #0                          @ set ip to 0
    str    ip, [sp]                        @ store NULL for method* at bottom of frame
    ldr    ip, [r0, #METHOD_QUICK_CODE_OFFSET_32]  @ get pointer to the code
    blx    ip                              @ call the method
    mov    sp, r11                         @ restore the stack pointer
    .cfi_def_cfa_register sp               @ r11 is clobbered by the pop below;
                                           @ without this the CFA would be lost
    ldr    ip, [sp, #24]                   @ load the result pointer
    strd   r0, [ip]                        @ store r0/r1 into result pointer
    pop    {r0, r4, r5, r9, r11, lr}       @ restore spill regs
    .cfi_restore r0
    .cfi_restore r4
    .cfi_restore r5
    .cfi_restore r9
    .cfi_restore r11                       @ was missing from the restore list
    .cfi_restore lr
    .cfi_adjust_cfa_offset -24
    bx     lr
END art_quick_invoke_stub
    339 
    /*
     * On entry r0 is uint32_t* gprs_ and r1 is uint32_t* fprs_.
     * Restores the full register context and jumps to the saved PC; never
     * returns to its caller. r0/r1 themselves are cleared (they hold the
     * context pointers, and r0/r1 serve as the result registers).
     */
ARM_ENTRY art_quick_do_long_jump
    vldm r1, {s0-s31}     @ load all fprs from argument fprs_
    ldr  r2, [r0, #60]    @ r2 = r15 (PC from gprs_ 60=4*15)
    ldr  r14, [r0, #56]   @ (LR from gprs_ 56=4*14)
    add  r0, r0, #12      @ increment r0 to skip gprs_[0..2] 12=4*3
    ldm  r0, {r3-r13}     @ load remaining gprs from argument gprs_ (includes sp=r13)
    mov  r0, #0           @ clear result registers r0 and r1
    mov  r1, #0
    bx   r2               @ do long jump
END art_quick_do_long_jump
    353 
    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
     * failure. r0 holds the array, r1 the payload (per the call below).
     */
    .extern artHandleFillArrayDataFromCode
ENTRY art_quick_handle_fill_data
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case exception allocation triggers GC
    mov    r2, r9                          @ pass Thread::Current
    mov    r3, sp                          @ pass SP
    bl     artHandleFillArrayDataFromCode  @ (Array*, const DexFile::Payload*, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO               @ 0 means success
    DELIVER_PENDING_EXCEPTION
END art_quick_handle_fill_data
    368 
    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. r0 holds the
     * possibly null object to lock.
     *
     * Fast path implements thin locking on the object's lock word: the low 16
     * bits appear to hold the owner thread id and bits 16+ the recursion count,
     * with the top two bits selecting the slow path (layout per LockWord —
     * confirm against lock_word.h).
     */
    .extern artLockObjectFromCode
ENTRY art_quick_lock_object
    cbz    r0, .Lslow_lock            @ null object -> slow path (throws there)
.Lretry_lock:
    ldr    r2, [r9, #THREAD_ID_OFFSET]
    ldrex  r1, [r0, #LOCK_WORD_OFFSET]
    cbnz   r1, .Lnot_unlocked         @ already thin locked
    @ unlocked case - r2 holds thread id with count of 0
    strex  r3, r2, [r0, #LOCK_WORD_OFFSET]
    cbnz   r3, .Lstrex_fail           @ store failed, retry
    dmb    ish                        @ full (LoadLoad|LoadStore) memory barrier
    bx lr
.Lstrex_fail:
    b .Lretry_lock                    @ unlikely forward branch, need to reload and recheck r1/r2
.Lnot_unlocked:
    lsr    r3, r1, 30
    cbnz   r3, .Lslow_lock            @ if either of the top two bits are set, go slow path
    eor    r2, r1, r2                 @ lock_word.ThreadId() ^ self->ThreadId()
    uxth   r2, r2                     @ zero top 16 bits
    cbnz   r2, .Lslow_lock            @ thread ids differ -> contention, go slow path;
                                      @ fall through when we already own the lock (recursive)
    add    r2, r1, #65536             @ increment count in lock word placing in r2 for storing
    lsr    r1, r2, 30                 @ if either of the top two bits are set, we overflowed.
    cbnz   r1, .Lslow_lock            @ if we overflow the count go slow path
    str    r2, [r0, #LOCK_WORD_OFFSET] @ no need for strex as we hold the lock
    bx lr
.Lslow_lock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case we block
    mov    r1, r9                     @ pass Thread::Current
    mov    r2, sp                     @ pass SP
    bl     artLockObjectFromCode      @ (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_lock_object
    408 
    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
     * r0 holds the possibly null object to unlock.
     * Fast path handles thin locks owned by the current thread: a recursion
     * count >= 1 (lock word >= 65536) is decremented, otherwise the word is
     * cleared to release the lock.
     */
    .extern artUnlockObjectFromCode
ENTRY art_quick_unlock_object
    cbz    r0, .Lslow_unlock          @ null object -> slow path (throws there)
    ldr    r1, [r0, #LOCK_WORD_OFFSET]
    lsr    r2, r1, 30
    cbnz   r2, .Lslow_unlock          @ if either of the top two bits are set, go slow path
    ldr    r2, [r9, #THREAD_ID_OFFSET]
    eor    r3, r1, r2                 @ lock_word.ThreadId() ^ self->ThreadId()
    uxth   r3, r3                     @ zero top 16 bits
    cbnz   r3, .Lslow_unlock          @ thread ids differ -> not our lock, go slow path
    cmp    r1, #65536                 @ any recursion count present?
    bpl    .Lrecursive_thin_unlock
    @ transition to unlocked, r3 holds 0 (eor result when ids match)
    dmb    ish                        @ full (LoadStore|StoreStore) memory barrier
    str    r3, [r0, #LOCK_WORD_OFFSET]
    bx     lr
.Lrecursive_thin_unlock:
    sub    r1, r1, #65536             @ decrement recursion count
    str    r1, [r0, #LOCK_WORD_OFFSET]
    bx     lr
.Lslow_unlock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case exception allocation triggers GC
    mov    r1, r9                     @ pass Thread::Current
    mov    r2, sp                     @ pass SP
    bl     artUnlockObjectFromCode    @ (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_unlock_object
    442 
    /*
     * Entry from managed code that calls artIsAssignableFromCode and on failure calls
     * artThrowClassCastException. r0/r1 hold the two Class* operands.
     */
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    push {r0-r1, lr}                    @ save arguments and link register
    .save {r0-r1, lr}
    .cfi_adjust_cfa_offset 12
    .cfi_rel_offset r0, 0
    .cfi_rel_offset r1, 4
    .cfi_rel_offset lr, 8
    sub sp, #4                          @ pad for 16-byte stack alignment
    .pad #4
    .cfi_adjust_cfa_offset 4
    bl artIsAssignableFromCode
    cbz    r0, .Lthrow_class_cast_exception
    add sp, #4
    .cfi_adjust_cfa_offset -4
    pop {r0-r1, pc}                     @ assignable: restore args and return
.Lthrow_class_cast_exception:
    add sp, #4
    .cfi_adjust_cfa_offset -4
    pop {r0-r1, lr}
    .cfi_adjust_cfa_offset -12          @ was missing: the pop rewinds the CFA too
    .cfi_restore r0
    .cfi_restore r1
    .cfi_restore lr                     @ was missing: lr is reloaded by the pop
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov r2, r9                      @ pass Thread::Current
    mov r3, sp                      @ pass SP
    b   artThrowClassCastException  @ (Class*, Class*, Thread*, SP)
    bkpt                            @ unreached
END art_quick_check_cast
    475 
    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * r0 = array, r1 = index, r2 = value
     * Null-checks the array, then falls through to the bounds-checking variant;
     * a null array tail-calls the NPE throw stub.
     */
ENTRY art_quick_aput_obj_with_null_and_bound_check
    tst r0, r0                                  @ array null?
    bne art_quick_aput_obj_with_bound_check     @ non-null: continue with bounds check
    b art_quick_throw_null_pointer_exception    @ null: throw NPE
END art_quick_aput_obj_with_null_and_bound_check
    486 
    /*
     * Bounds-checked object array store: r0 = array, r1 = index, r2 = value.
     * On out-of-bounds, tail-calls the array-bounds throw stub with
     * r0 = index, r1 = limit (the stub's expected argument order).
     */
    .hidden art_quick_aput_obj_with_bound_check
ENTRY art_quick_aput_obj_with_bound_check
    ldr r3, [r0, #ARRAY_LENGTH_OFFSET]  @ r3 = array length
    cmp r3, r1
    bhi art_quick_aput_obj              @ unsigned length > index: in bounds
    mov r0, r1                          @ pass index
    mov r1, r3                          @ pass limit (length)
    b art_quick_throw_array_bounds
END art_quick_aput_obj_with_bound_check
    496 
    /*
     * Object array store with assignability check: r0 = array, r1 = index,
     * r2 = value. Fast path stores directly when the value's class equals the
     * array's component type; null stores skip the card mark; otherwise calls
     * artIsAssignableFromCode and throws ArrayStoreException on failure.
     */
    .hidden art_quick_aput_obj
ENTRY art_quick_aput_obj
    cbz r2, .Ldo_aput_null
    ldr r3, [r0, #CLASS_OFFSET]
    ldr ip, [r2, #CLASS_OFFSET]
    ldr r3, [r3, #CLASS_COMPONENT_TYPE_OFFSET]
    cmp r3, ip  @ value's type == array's component type - trivial assignability
    bne .Lcheck_assignability
.Ldo_aput:
    add r3, r0, #OBJECT_ARRAY_DATA_OFFSET
    str r2, [r3, r1, lsl #2]
    ldr r3, [r9, #THREAD_CARD_TABLE_OFFSET]
    lsr r0, r0, #7                     @ card index = address >> 7
    strb r3, [r3, r0]                  @ mark card for the GC write barrier
    blx lr
.Ldo_aput_null:
    add r3, r0, #OBJECT_ARRAY_DATA_OFFSET
    str r2, [r3, r1, lsl #2]           @ storing null: no card mark needed
    blx lr
.Lcheck_assignability:
    push {r0-r2, lr}             @ save arguments
    .save {r0-r2, lr}
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset r0, 0
    .cfi_rel_offset r1, 4
    .cfi_rel_offset r2, 8
    .cfi_rel_offset lr, 12
    mov r1, ip                   @ pass value's class
    mov r0, r3                   @ pass component type
    bl artIsAssignableFromCode
    cbz r0, .Lthrow_array_store_exception
    pop {r0-r2, lr}              @ assignable: redo the store + card mark
    .cfi_restore r0
    .cfi_restore r1
    .cfi_restore r2
    .cfi_restore lr
    .cfi_adjust_cfa_offset -16
    add r3, r0, #OBJECT_ARRAY_DATA_OFFSET
    str r2, [r3, r1, lsl #2]
    ldr r3, [r9, #THREAD_CARD_TABLE_OFFSET]
    lsr r0, r0, #7
    strb r3, [r3, r0]
    blx lr
.Lthrow_array_store_exception:
    pop {r0-r2, lr}
    .cfi_restore r0
    .cfi_restore r1
    .cfi_restore r2
    .cfi_restore lr
    .cfi_adjust_cfa_offset -16
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov r1, r2                   @ pass value's class
    mov r2, r9                   @ pass Thread::Current
    mov r3, sp                   @ pass SP
    b artThrowArrayStoreException  @ (Class*, Class*, Thread*, SP)
    bkpt                         @ unreached
END art_quick_aput_obj
    554 
    /*
     * Entry from managed code when uninitialized static storage, this stub will run the class
     * initializer and deliver the exception on error. On success the static storage base is
     * returned in r0.
     */
    .extern artInitializeStaticStorageFromCode
ENTRY art_quick_initialize_static_storage
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
    mov    r2, r9                              @ pass Thread::Current
    mov    r3, sp                              @ pass SP
    @ artInitializeStaticStorageFromCode(uint32_t type_idx, Method* referrer, Thread*, SP)
    bl     artInitializeStaticStorageFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO               @ non-null result means success
    DELIVER_PENDING_EXCEPTION
END art_quick_initialize_static_storage
    571 
    /*
     * Entry from managed code when dex cache misses for a type_idx.
     * Returns the resolved type in r0 or delivers the pending exception.
     */
    .extern artInitializeTypeFromCode
ENTRY art_quick_initialize_type
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
    mov    r2, r9                              @ pass Thread::Current
    mov    r3, sp                              @ pass SP
    @ artInitializeTypeFromCode(uint32_t type_idx, Method* referrer, Thread*, SP)
    bl     artInitializeTypeFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO               @ non-null result means success
    DELIVER_PENDING_EXCEPTION
END art_quick_initialize_type
    586 
    /*
     * Entry from managed code when type_idx needs to be checked for access and dex cache may also
     * miss. Returns the resolved type in r0 or delivers the pending exception.
     */
    .extern artInitializeTypeAndVerifyAccessFromCode
ENTRY art_quick_initialize_type_and_verify_access
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
    mov    r2, r9                              @ pass Thread::Current
    mov    r3, sp                              @ pass SP
    @ artInitializeTypeAndVerifyAccessFromCode(uint32_t type_idx, Method* referrer, Thread*, SP)
    bl     artInitializeTypeAndVerifyAccessFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO               @ non-null result means success
    DELIVER_PENDING_EXCEPTION
END art_quick_initialize_type_and_verify_access
    602 
    /*
     * Called by managed code to resolve a static field and load a 32-bit primitive value.
     * r0 holds the field_idx; result returned in r0, with a pending-exception
     * check on Thread::Current()->exception_ to distinguish failure.
     */
    .extern artGet32StaticFromCode
ENTRY art_quick_get32_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r1, [sp, #32]                 @ pass referrer
    mov    r2, r9                        @ pass Thread::Current
    mov    r3, sp                        @ pass SP
    bl     artGet32StaticFromCode        @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
    ldr    r1, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r1, 1f                        @ exception pending -> deliver it
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get32_static
    620 
    /*
     * Called by managed code to resolve a static field and load a 64-bit primitive value.
     * r0 holds the field_idx; result returned in r0/r1, so the exception flag
     * is checked via r2 to avoid clobbering the result pair.
     */
    .extern artGet64StaticFromCode
ENTRY art_quick_get64_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r1, [sp, #32]                 @ pass referrer
    mov    r2, r9                        @ pass Thread::Current
    mov    r3, sp                        @ pass SP
    bl     artGet64StaticFromCode        @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
    ldr    r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r2, 1f                        @ exception pending -> deliver it
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get64_static
    638 
    /*
     * Called by managed code to resolve a static field and load an object reference.
     * r0 holds the field_idx; result returned in r0, with a pending-exception
     * check to distinguish a genuinely null field value from failure.
     */
    .extern artGetObjStaticFromCode
ENTRY art_quick_get_obj_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r1, [sp, #32]                 @ pass referrer
    mov    r2, r9                        @ pass Thread::Current
    mov    r3, sp                        @ pass SP
    bl     artGetObjStaticFromCode       @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
    ldr    r1, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r1, 1f                        @ exception pending -> deliver it
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get_obj_static
    656 
    /*
     * Called by managed code to resolve an instance field and load a 32-bit primitive value.
     * r0 = field_idx, r1 = Object*; result in r0 with a pending-exception check.
     */
    .extern artGet32InstanceFromCode
ENTRY art_quick_get32_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16                             @ unwind info was missing here; the
    .cfi_adjust_cfa_offset 16            @ get64/get_obj variants track this
    bl     artGet32InstanceFromCode      @ (field_idx, Object*, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    ldr    r1, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r1, 1f                        @ exception pending -> deliver it
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get32_instance
    676 
    /*
     * Called by managed code to resolve an instance field and load a 64-bit primitive value.
     * r0 = field_idx, r1 = Object*; result in r0/r1, so the exception flag is
     * checked via r2 to avoid clobbering the result pair.
     */
    .extern artGet64InstanceFromCode
ENTRY art_quick_get64_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     artGet64InstanceFromCode      @ (field_idx, Object*, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    ldr    r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r2, 1f                        @ exception pending -> deliver it
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get64_instance
    699 
    /*
     * Called by managed code to resolve an instance field and load an object reference.
     * On entry: r0 holds the field index, r1 holds the Object*; result is returned in r0.
     */
    .extern artGetObjInstanceFromCode
ENTRY art_quick_get_obj_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     artGetObjInstanceFromCode     @ (field_idx, Object*, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    ldr    r1, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz   r1, 1f                        @ deliver exception if one is pending
    bx     lr                            @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_get_obj_instance
    722 
    /*
     * Called by managed code to resolve a static field and store a 32-bit primitive value.
     * On entry: r0 holds the field index, r1 holds new_val.
     */
    .extern artSet32StaticFromCode
ENTRY art_quick_set32_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     artSet32StaticFromCode        @ (field_idx, new_val, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO             @ r0 == 0 means success; return
    DELIVER_PENDING_EXCEPTION            @ otherwise an exception is pending
END art_quick_set32_static
    742 
    /*
     * Called by managed code to resolve a static field and store a 64-bit primitive value.
     * On entry r0 holds field index, r1:r2 hold new_val
     * The wide value is shuffled into r2:r3 so the referrer can go in r1; the remaining
     * C arguments (Thread*, SP) are passed on the stack.
     */
    .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    mov    r3, r2                        @ pass one half of wide argument
    mov    r2, r1                        @ pass other half of wide argument
    ldr    r1, [sp, #32]                 @ pass referrer
    mov    r12, sp                       @ save SP
    sub    sp, #8                        @ grow frame for alignment with stack args
    .pad #8
    .cfi_adjust_cfa_offset 8
    push   {r9, r12}                     @ pass Thread::Current and SP
                                         @ r9 (Thread*) at [sp], r12 (SP) at [sp, #4]
    .save {r9, r12}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    bl     artSet64StaticFromCode        @ (field_idx, referrer, new_val, Thread*, SP)
    add    sp, #16                       @ release out args
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME   @ TODO: we can clearly save an add here
    RETURN_IF_RESULT_IS_ZERO             @ r0 == 0 means success; return
    DELIVER_PENDING_EXCEPTION            @ otherwise an exception is pending
END art_quick_set64_static
    768 
    /*
     * Called by managed code to resolve a static field and store an object reference.
     * On entry: r0 holds the field index, r1 holds new_val.
     */
    .extern artSetObjStaticFromCode
ENTRY art_quick_set_obj_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r2, [sp, #32]                 @ pass referrer
    mov    r3, r9                        @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!              @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    bl     artSetObjStaticFromCode       @ (field_idx, new_val, referrer, Thread*, SP)
    add    sp, #16                       @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO             @ r0 == 0 means success; return
    DELIVER_PENDING_EXCEPTION            @ otherwise an exception is pending
END art_quick_set_obj_static
    788 
    /*
     * Called by managed code to resolve an instance field and store a 32-bit primitive value.
     * On entry: r0 holds the field index, r1 holds the Object*, r2 holds new_val.
     * Thread* and SP are passed as the 5th/6th C arguments on the stack.
     */
    .extern artSet32InstanceFromCode
ENTRY art_quick_set32_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r3, [sp, #32]                 @ pass referrer
    mov    r12, sp                       @ save SP
    sub    sp, #8                        @ grow frame for alignment with stack args
    .pad #8
    .cfi_adjust_cfa_offset 8
    push   {r9, r12}                     @ pass Thread::Current and SP
                                         @ r9 (Thread*) at [sp], r12 (SP) at [sp, #4]
    .save {r9, r12}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    .cfi_rel_offset r12, 4
    bl     artSet32InstanceFromCode      @ (field_idx, Object*, new_val, referrer, Thread*, SP)
    add    sp, #16                       @ release out args
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME   @ TODO: we can clearly save an add here
    RETURN_IF_RESULT_IS_ZERO             @ r0 == 0 means success; return
    DELIVER_PENDING_EXCEPTION            @ otherwise an exception is pending
END art_quick_set32_instance
    812 
    /*
     * Called by managed code to resolve an instance field and store a 64-bit primitive value.
     * On entry: r0 holds the field index, r1 holds the Object*, r2:r3 hold new_val.
     * Thread* and SP are passed as stack arguments.
     */
    .extern artSet64InstanceFromCode     @ fix: was artSet32InstanceFromCode (copy-paste bug)
ENTRY art_quick_set64_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    mov    r12, sp                       @ save SP
    sub    sp, #8                        @ grow frame for alignment with stack args
    .pad #8
    .cfi_adjust_cfa_offset 8
    push   {r9, r12}                     @ pass Thread::Current and SP
    .save {r9, r12}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    bl     artSet64InstanceFromCode      @ (field_idx, Object*, new_val, Thread*, SP)
    add    sp, #16                       @ release out args
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME   @ TODO: we can clearly save an add here
    RETURN_IF_RESULT_IS_ZERO             @ r0 == 0 means success; return
    DELIVER_PENDING_EXCEPTION            @ otherwise an exception is pending
END art_quick_set64_instance
    834 
    /*
     * Called by managed code to resolve an instance field and store an object reference.
     * On entry: r0 holds the field index, r1 holds the Object*, r2 holds new_val.
     */
    .extern artSetObjInstanceFromCode
ENTRY art_quick_set_obj_instance
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
    ldr    r3, [sp, #32]                 @ pass referrer
    mov    r12, sp                       @ save SP
    sub    sp, #8                        @ grow frame for alignment with stack args
    .pad #8
    .cfi_adjust_cfa_offset 8
    push   {r9, r12}                     @ pass Thread::Current and SP
                                         @ r9 (Thread*) at [sp], r12 (SP) at [sp, #4]
    .save {r9, r12}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    bl     artSetObjInstanceFromCode     @ (field_idx, Object*, new_val, referrer, Thread*, SP)
    add    sp, #16                       @ release out args
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME   @ TODO: we can clearly save an add here
    RETURN_IF_RESULT_IS_ZERO             @ r0 == 0 means success; return
    DELIVER_PENDING_EXCEPTION            @ otherwise an exception is pending
END art_quick_set_obj_instance
    857 
    /*
     * Entry from managed code to resolve a string, this stub will allocate a String and deliver an
     * exception on error. On success the String is returned. R0 holds the referring method,
     * R1 holds the string index. The fast path check for hit in strings cache has already been
     * performed.
     */
    .extern artResolveStringFromCode
ENTRY art_quick_resolve_string
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
    mov    r2, r9                     @ pass Thread::Current
    mov    r3, sp                     @ pass SP
    @ artResolveStringFromCode(Method* referrer, uint32_t string_idx, Thread*, SP)
    bl     artResolveStringFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO      @ non-null String* in r0 means success; return
    DELIVER_PENDING_EXCEPTION         @ null result: deliver the pending exception
END art_quick_resolve_string
    875 
// Macro to facilitate adding new allocation entrypoints.
// Expands to a stub that forwards r0/r1 plus Thread*/SP to \entrypoint and then
// runs \return (a RETURN_IF_* macro) to pick the success path, falling through
// to exception delivery otherwise.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
    mov    r2, r9                     @ pass Thread::Current
    mov    r3, sp                     @ pass SP
    bl     \entrypoint     @ (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm
    889 
// Macro to facilitate adding new array allocation entrypoints.
// Like TWO_ARG_DOWNCALL but forwards three register arguments; SP is therefore
// passed as a stack argument via a 16-byte frame expansion.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
    mov    r3, r9                     @ pass Thread::Current
    mov    r12, sp
    str    r12, [sp, #-16]!           @ expand the frame and pass SP
    .pad #16
    .cfi_adjust_cfa_offset 16
    @ (uint32_t type_idx, Method* method, int32_t component_count, Thread*, SP)
    bl     \entrypoint
    add    sp, #16                    @ strip the extra frame
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm
    909 
// Generate the allocation entrypoints for each allocator, using the
// TWO_ARG_DOWNCALL/THREE_ARG_DOWNCALL macros defined above
// (macro defined in arch/quick_alloc_entrypoints.S, included at the top of this file).
GENERATE_ALL_ALLOC_ENTRYPOINTS
    912 
    /*
     * Called by managed code when the value in rSUSPEND has been decremented to 0.
     * Fast path: when ARM_R4_SUSPEND_FLAG is set and no thread flags are raised,
     * reset the countdown and return without building a frame.
     */
    .extern artTestSuspendFromCode
ENTRY art_quick_test_suspend
#ifdef ARM_R4_SUSPEND_FLAG
    ldrh    r0, [rSELF, #THREAD_FLAGS_OFFSET]  @ load 16-bit thread flags
    mov    rSUSPEND, #SUSPEND_CHECK_INTERVAL  @ reset rSUSPEND to SUSPEND_CHECK_INTERVAL
    cbnz   r0, 1f                             @ check Thread::Current()->suspend_count_ == 0
    bx     lr                                 @ return if suspend_count_ == 0
1:
#endif
    mov    r0, rSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME          @ save callee saves for stack crawl
    mov    r1, sp
    bl     artTestSuspendFromCode             @ (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_test_suspend
    931 
// Slow path of the implicit suspend check: always calls into the runtime,
// with no fast-path flag test (compare art_quick_test_suspend above).
ENTRY art_quick_implicit_suspend
    mov    r0, rSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME          @ save callee saves for stack crawl
    mov    r1, sp
    bl     artTestSuspendFromCode             @ (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_implicit_suspend
    939 
    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * r0 holds the proxy method and r1 holds the receiver; r2 and r3 may contain arguments. The
     * frame size of the invoked proxy method agrees with a ref and args callee save frame.
     */
     .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str     r0, [sp, #0]           @ place proxy method at bottom of frame
    mov     r2, r9                 @ pass Thread::Current
    mov     r3, sp                 @ pass SP
    blx     artQuickProxyInvokeHandler  @ (Method* proxy method, receiver, Thread*, SP)
    ldr     r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    add     sp, #16                @ skip r1-r3, 4 bytes padding.
    .cfi_adjust_cfa_offset -16
    @ NOTE(review): after dropping the 16 bytes above, the remaining frame is assumed to
    @ match the refs-only layout so RESTORE_REF_ONLY_* can tear it down -- confirm.
    cbnz    r2, 1f                 @ deliver exception if one is pending
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    bx      lr                     @ return on success
1:
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler
    962 
    /*
     * Called to resolve an imt conflict. r12 is a hidden argument that holds the target method's
     * dex method index. Resolves the target via the caller's dex cache, then tail-calls
     * the interface trampoline with the resolved Method* in r0.
     */
ENTRY art_quick_imt_conflict_trampoline
    ldr    r0, [sp, #0]            @ load caller Method*
    ldr    r0, [r0, #METHOD_DEX_CACHE_METHODS_OFFSET]  @ load dex_cache_resolved_methods
    add    r0, #OBJECT_ARRAY_DATA_OFFSET  @ get starting address of data
    ldr    r0, [r0, r12, lsl 2]    @ load the target method (index r12, 4-byte entries)
    b art_quick_invoke_interface_trampoline  @ tail-call; no frame built here
END art_quick_imt_conflict_trampoline
    974 
    // Resolves the called method on first invocation, then tail-calls the resolved code.
    // The runtime writes the resolved Method* to the bottom of the frame.
    .extern artQuickResolutionTrampoline
ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov     r2, r9                 @ pass Thread::Current
    mov     r3, sp                 @ pass SP
    blx     artQuickResolutionTrampoline  @ (Method* called, receiver, Thread*, SP)
    cbz     r0, 1f                 @ is code pointer null? goto exception
    mov     r12, r0                @ keep code pointer out of the argument registers
    ldr  r0, [sp, #0]              @ load resolved method in r0
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    bx      r12                    @ tail-call into actual code
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline
    990 
    /*
     * Called to do a generic JNI down-call.
     * Builds the native call frame via artQuickGenericJniTrampoline, invokes the
     * native code, then finishes through artQuickGenericJniEndTrampoline.
     * r10 holds the managed frame SP across the native call; r11 holds Thread*.
     */
ENTRY_NO_HIDE art_quick_generic_jni_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str r0, [sp, #0]  // Store native ArtMethod* to bottom of stack.

    // Save rSELF
    mov r11, rSELF
    // Save SP , so we can have static CFI info. r10 is saved in ref_and_args.
    mov r10, sp
    .cfi_def_cfa_register r10

    // Reserve scratch space for the native frame setup; the unused part is
    // released below via "mov sp, r1".
    sub sp, sp, #5120

    // prepare for artQuickGenericJniTrampoline call
    // (Thread*,  SP)
    //    r0      r1   <= C calling convention
    //  rSELF     r10  <= where they are

    mov r0, rSELF   // Thread*
    mov r1, r10
    blx artQuickGenericJniTrampoline  // (Thread*, sp)

    // The C call will have registered the complete save-frame on success.
    // The result of the call is:
    // r0: pointer to native code, 0 on error.
    // r1: pointer to the bottom of the used area of the alloca, can restore stack till there.

    // Check for error = 0.
    cbz r0, .Lentry_error

    // Release part of the alloca.
    mov sp, r1

    // Save the code pointer
    mov r12, r0

    // Load parameters from frame into registers.
    pop {r0-r3}

    // Softfloat.
    // TODO: Change to hardfloat when supported.

    blx r12           // native call.

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*, result, result_f)
    //    r0      r2,r3    stack       <= C calling convention
    //    r11     r0,r1    r0,r1          <= where they are
    sub sp, sp, #8 // Stack alignment.

    push {r0-r1}    // result_f stack argument (same bits as the GPR result)
    mov r3, r1
    mov r2, r0
    mov r0, r11
 
    blx artQuickGenericJniEndTrampoline

    // Tear down the alloca.
    mov sp, r10
    .cfi_def_cfa_register sp

    // Restore self pointer.
    mov r9, r11

    // Pending exceptions possible.
    ldr r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    cbnz r2, .Lexception_in_native

    // Tear down the callee-save frame.
    add  sp, #12                      @ rewind sp
    // Do not pop r0 and r1, they contain the return value.
    pop {r2-r3, r5-r8, r10-r11, lr}  @ 9 words of callee saves
    .cfi_restore r2
    .cfi_restore r3
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_adjust_cfa_offset -48

    bx lr      // ret

.Lentry_error:
    // Setup failed: unwind the alloca and fall through to exception delivery.
    mov sp, r10
    .cfi_def_cfa_register sp
    mov r9, r11
.Lexception_in_native:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION

END art_quick_generic_jni_trampoline
   1087 
    // Bridge from compiled code into the interpreter: hands the Method* plus
    // Thread*/SP to the runtime and returns the interpreter's result in r0/r1.
    .extern artQuickToInterpreterBridge
ENTRY_NO_HIDE art_quick_to_interpreter_bridge
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov     r1, r9                 @ pass Thread::Current
    mov     r2, sp                 @ pass SP
    blx     artQuickToInterpreterBridge    @ (Method* method, Thread*, SP)
    ldr     r2, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
    add     sp, #16                @ skip r1-r3, 4 bytes padding.
    .cfi_adjust_cfa_offset -16
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    cbnz    r2, 1f                 @ deliver exception if one is pending
    bx    lr                       @ return on success
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge
   1103 
    /*
     * Routine that intercepts method calls and returns.
     * The entry stub notifies the runtime of the call and receives the real code
     * pointer back in r0; it then invokes it with lr pointing at
     * art_quick_instrumentation_exit (set up by the runtime).
     */
    .extern artInstrumentationMethodEntryFromCode
    .extern artInstrumentationMethodExitFromCode
ENTRY art_quick_instrumentation_entry
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str   r0, [sp, #4]     @ preserve r0
    mov   r12, sp          @ remember sp
    str   lr, [sp, #-16]!  @ expand the frame and pass LR
    .pad #16
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset lr, 0
    mov   r2, r9         @ pass Thread::Current
    mov   r3, r12        @ pass SP
    blx   artInstrumentationMethodEntryFromCode  @ (Method*, Object*, Thread*, SP, LR)
    add   sp, #16        @ remove out argument and padding from stack
    .cfi_adjust_cfa_offset -16
    mov   r12, r0        @ r12 holds reference to code
    ldr   r0, [sp, #4]   @ restore r0
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    blx   r12            @ call method with lr set to art_quick_instrumentation_exit
END art_quick_instrumentation_entry
    // Exit stub: the instrumented method "returns" here (lr was pointed at this
    // label by the entry stub). Reports the return value to the runtime, which
    // hands back the real return address (r0) and a possible deoptimization
    // address (r1).
    .type art_quick_instrumentation_exit, #function
    .global art_quick_instrumentation_exit
art_quick_instrumentation_exit:
    .cfi_startproc
    .fnstart
    mov   lr, #0         @ link register is to here, so clobber with 0 for later checks
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    mov   r12, sp        @ remember bottom of caller's frame
    push  {r0-r1}        @ save return value
    .save {r0-r1}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r0, 0
    .cfi_rel_offset r1, 4
    sub   sp, #8         @ space for return value argument
    .pad #8
    .cfi_adjust_cfa_offset 8
    strd r0, [sp]        @ r0/r1 -> [sp] for fpr_res
    mov   r2, r0         @ pass return value as gpr_res
    mov   r3, r1
    mov   r0, r9         @ pass Thread::Current
    mov   r1, r12        @ pass SP
    blx   artInstrumentationMethodExitFromCode  @ (Thread*, SP, gpr_res, fpr_res)
    add   sp, #8
    .cfi_adjust_cfa_offset -8

    mov   r2, r0         @ link register saved by instrumentation
    mov   lr, r1         @ r1 is holding link register if we're to bounce to deoptimize
    pop   {r0, r1}       @ restore return value
    .cfi_restore r0
    .cfi_restore r1
    add sp, #32          @ remove callee save frame
    .cfi_adjust_cfa_offset -32
    bx    r2             @ return
END art_quick_instrumentation_exit
   1161 
    /*
     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     * artDeoptimize does not return, so no epilogue is needed.
     */
    .extern artDeoptimize
ENTRY art_quick_deoptimize
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov    r0, r9         @ Set up args.
    mov    r1, sp
    blx    artDeoptimize  @ artDeoptimize(Thread*, SP)
END art_quick_deoptimize
   1173 
    /*
     * Signed 64-bit integer multiply.
     *
     * Consider WXxYZ (r1r0 x r3r2) with a long multiply:
     *        WX
     *      x YZ
     *  --------
     *     ZW ZX
     *  YW YX
     *
     * The low word of the result holds ZX, the high word holds
     * (ZW+YX) + (the high overflow from ZX).  YW doesn't matter because
     * it doesn't fit in the low 64 bits.
     *
     * Unlike most ARM math operations, multiply instructions have
     * restrictions on using the same register more than once (Rd and Rm
     * cannot be the same).
     */
    /* mul-long vAA, vBB, vCC */
ENTRY art_quick_mul_long
    push    {r9 - r10}
    .save {r9 - r10}
    .cfi_adjust_cfa_offset 8
    .cfi_rel_offset r9, 0
    .cfi_rel_offset r10, 4
    mul     ip, r2, r1                  @  ip<- ZxW
    umull   r9, r10, r2, r0             @  r9/r10 <- ZxX (low/high)
    mla     r2, r0, r3, ip              @  r2<- YxX + (ZxW)
    add     r10, r2, r10                @  r10<- high(ZxX) + (YxX + ZxW)
    mov     r0,r9                       @  result low word
    mov     r1,r10                      @  result high word
    pop     {r9 - r10}
    .cfi_adjust_cfa_offset -8
    .cfi_restore r9
    .cfi_restore r10
    bx      lr
END art_quick_mul_long
   1211 
    /*
     * Long integer shift.  This is different from the generic 32/64-bit
     * binary operations because vAA/vBB are 64-bit but vCC (the shift
     * distance) is 32-bit.  Also, Dalvik requires us to ignore all but the low
     * 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
    /* shl-long vAA, vBB, vCC */
ARM_ENTRY art_quick_shl_long            @ ARM code as thumb code requires spills
    and     r2, r2, #63                 @ r2<- r2 & 0x3f
    mov     r1, r1, asl r2              @  r1<- r1 << r2
    rsb     r3, r2, #32                 @  r3<- 32 - r2
    orr     r1, r1, r0, lsr r3          @  r1<- r1 | (r0 >> (32-r2))
    subs    ip, r2, #32                 @  ip<- r2 - 32
    movpl   r1, r0, asl ip              @  if r2 >= 32, r1<- r0 << (r2-32)
    mov     r0, r0, asl r2              @  r0<- r0 << r2
    bx      lr
END art_quick_shl_long
   1233 
    /*
     * Long integer shift (arithmetic/signed right shift).  This is different
     * from the generic 32/64-bit binary operations because vAA/vBB are 64-bit
     * but vCC (the shift distance) is 32-bit.  Also, Dalvik requires us to
     * ignore all but the low 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
    /* shr-long vAA, vBB, vCC */
ARM_ENTRY art_quick_shr_long            @ ARM code as thumb code requires spills
    and     r2, r2, #63                 @ r2<- r2 & 0x3f
    mov     r0, r0, lsr r2              @  r0<- r0 >> r2
    rsb     r3, r2, #32                 @  r3<- 32 - r2
    orr     r0, r0, r1, asl r3          @  r0<- r0 | (r1 << (32-r2))
    subs    ip, r2, #32                 @  ip<- r2 - 32
    movpl   r0, r1, asr ip              @  if r2 >= 32, r0<-r1 >> (r2-32)
    mov     r1, r1, asr r2              @  r1<- r1 >> r2
    bx      lr
END art_quick_shr_long
   1255 
    /*
     * Long integer shift (logical/unsigned right shift).  This is different
     * from the generic 32/64-bit binary operations because vAA/vBB are 64-bit
     * but vCC (the shift distance) is 32-bit.  Also, Dalvik requires us to
     * ignore all but the low 6 bits.
     * On entry:
     *   r0: low word
     *   r1: high word
     *   r2: shift count
     */
    /* ushr-long vAA, vBB, vCC */
ARM_ENTRY art_quick_ushr_long           @ ARM code as thumb code requires spills
    and     r2, r2, #63                 @ r2<- r2 & 0x3f
    mov     r0, r0, lsr r2              @  r0<- r0 >>> r2
    rsb     r3, r2, #32                 @  r3<- 32 - r2
    orr     r0, r0, r1, asl r3          @  r0<- r0 | (r1 << (32-r2))
    subs    ip, r2, #32                 @  ip<- r2 - 32
    movpl   r0, r1, lsr ip              @  if r2 >= 32, r0<-r1 >>> (r2-32)
    mov     r1, r1, lsr r2              @  r1<- r1 >>> r2
    bx      lr
END art_quick_ushr_long
   1277 
    /*
     * String's indexOf.
     *
     * On entry:
     *    r0:   string object (known non-null)
     *    r1:   char to match (known <= 0xFFFF)
     *    r2:   Starting offset in string data
     * Returns the index of the first match in r0, or -1 if not found.
     * Scans four chars per iteration, then a single-char tail loop.
     */
ENTRY art_quick_indexof
    push {r4, r10-r11, lr} @ 4 words of callee saves
    .save {r4, r10-r11, lr}
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset r4, 0
    .cfi_rel_offset r10, 4
    .cfi_rel_offset r11, 8
    .cfi_rel_offset lr, 12
    ldr   r3, [r0, #STRING_COUNT_OFFSET]   @ r3<- string length
    ldr   r12, [r0, #STRING_OFFSET_OFFSET] @ r12<- offset into value array
    ldr   r0, [r0, #STRING_VALUE_OFFSET]   @ r0<- char[] value object

    /* Clamp start to [0..count] */
    cmp   r2, #0
    it    lt
    movlt r2, #0
    cmp   r2, r3
    it    gt
    movgt r2, r3

    /* Build a pointer to the start of string data */
    add   r0, #STRING_DATA_OFFSET
    add   r0, r0, r12, lsl #1              @ + offset * sizeof(char16)

    /* Save a copy in r12 to later compute result */
    mov   r12, r0

    /* Build pointer to start of data to compare and pre-bias */
    add   r0, r0, r2, lsl #1
    sub   r0, #2                           @ pre-bias for the ldrh pre-index below

    /* Compute iteration count */
    sub   r2, r3, r2

    /*
     * At this point we have:
     *   r0: start of data to test
     *   r1: char to compare
     *   r2: iteration count
     *   r12: original start of string data
     *   r3, r4, r10, r11 available for loading string data
     */

    subs  r2, #4
    blt   .Lindexof_remainder              @ fewer than 4 chars left: tail loop

.Lindexof_loop4:
    @ Load and test four chars per iteration.
    ldrh  r3, [r0, #2]!
    ldrh  r4, [r0, #2]!
    ldrh  r10, [r0, #2]!
    ldrh  r11, [r0, #2]!
    cmp   r3, r1
    beq   .Lmatch_0
    cmp   r4, r1
    beq   .Lmatch_1
    cmp   r10, r1
    beq   .Lmatch_2
    cmp   r11, r1
    beq   .Lmatch_3
    subs  r2, #4
    bge   .Lindexof_loop4

.Lindexof_remainder:
    adds  r2, #4                           @ restore remaining count (0..3)
    beq   .Lindexof_nomatch

.Lindexof_loop1:
    ldrh  r3, [r0, #2]!
    cmp   r3, r1
    beq   .Lmatch_3                        @ r0 already points at the match
    subs  r2, #1
    bne   .Lindexof_loop1

.Lindexof_nomatch:
    mov   r0, #-1
    pop {r4, r10-r11, pc}

    @ .Lmatch_N: r0 points N half-words past the match; back up, then
    @ convert the byte distance from the string start into a char index.
.Lmatch_0:
    sub   r0, #6
    sub   r0, r12
    asr   r0, r0, #1
    pop {r4, r10-r11, pc}
.Lmatch_1:
    sub   r0, #4
    sub   r0, r12
    asr   r0, r0, #1
    pop {r4, r10-r11, pc}
.Lmatch_2:
    sub   r0, #2
    sub   r0, r12
    asr   r0, r0, #1
    pop {r4, r10-r11, pc}
.Lmatch_3:
    sub   r0, r12
    asr   r0, r0, #1
    pop {r4, r10-r11, pc}
END art_quick_indexof
   1383 
   1384    /*
   1385      * String's compareTo.
   1386      *
   1387      * Requires rARG0/rARG1 to have been previously checked for null.  Will
   1388      * return negative if this's string is < comp, 0 if they are the
   1389      * same and positive if >.
   1390      *
   1391      * On entry:
   1392      *    r0:   this object pointer
   1393      *    r1:   comp object pointer
   1394      *
   1395      */
   1396     .extern __memcmp16
   1397 ENTRY art_quick_string_compareto
   1398     mov    r2, r0         @ this to r2, opening up r0 for return value
   1399     sub    r0, r2, r1     @ Same?
   1400     cbnz   r0,1f
   1401     bx     lr
   1402 1:                        @ Same strings, return.
   1403 
   1404     push {r4, r7-r12, lr} @ 8 words - keep alignment
   1405     .save {r4, r7-r12, lr}
   1406     .cfi_adjust_cfa_offset 32
   1407     .cfi_rel_offset r4, 0
   1408     .cfi_rel_offset r7, 4
   1409     .cfi_rel_offset r8, 8
   1410     .cfi_rel_offset r9, 12
   1411     .cfi_rel_offset r10, 16
   1412     .cfi_rel_offset r11, 20
   1413     .cfi_rel_offset r12, 24
   1414     .cfi_rel_offset lr, 28
   1415 
   1416     ldr    r4, [r2, #STRING_OFFSET_OFFSET]
   1417     ldr    r9, [r1, #STRING_OFFSET_OFFSET]
   1418     ldr    r7, [r2, #STRING_COUNT_OFFSET]
   1419     ldr    r10, [r1, #STRING_COUNT_OFFSET]
   1420     ldr    r2, [r2, #STRING_VALUE_OFFSET]
   1421     ldr    r1, [r1, #STRING_VALUE_OFFSET]
   1422 
   1423     /*
   1424      * At this point, we have:
   1425      *    value:  r2/r1
   1426      *    offset: r4/r9
   1427      *    count:  r7/r10
   1428      * We're going to compute
   1429      *    r11 <- countDiff
   1430      *    r10 <- minCount
   1431      */
   1432      subs  r11, r7, r10
   1433      it    ls
   1434      movls r10, r7
   1435 
   1436      /* Now, build pointers to the string data */
   1437      add   r2, r2, r4, lsl #1
   1438      add   r1, r1, r9, lsl #1
   1439      /*
   1440       * Note: data pointers point to previous element so we can use pre-index
   1441       * mode with base writeback.
   1442       */
   1443      add   r2, #STRING_DATA_OFFSET-2   @ offset to contents[-1]
   1444      add   r1, #STRING_DATA_OFFSET-2   @ offset to contents[-1]
   1445 
   1446      /*
   1447       * At this point we have:
   1448       *   r2: *this string data
   1449       *   r1: *comp string data
   1450       *   r10: iteration count for comparison
   1451       *   r11: value to return if the first part of the string is equal
   1452       *   r0: reserved for result
   1453       *   r3, r4, r7, r8, r9, r12 available for loading string data
   1454       */
   1455 
   1456     subs  r10, #2
   1457     blt   .Ldo_remainder2
   1458 
   1459       /*
   1460        * Unroll the first two checks so we can quickly catch early mismatch
   1461        * on long strings (but preserve incoming alignment)
   1462        */
   1463 
   1464     ldrh  r3, [r2, #2]!
   1465     ldrh  r4, [r1, #2]!
   1466     ldrh  r7, [r2, #2]!
   1467     ldrh  r8, [r1, #2]!
   1468     subs  r0, r3, r4
   1469     it    eq
   1470     subseq  r0, r7, r8
   1471     bne   .Ldone
   1472     cmp   r10, #28
   1473     bgt   .Ldo_memcmp16
   1474     subs  r10, #3
   1475     blt   .Ldo_remainder
   1476 
   1477 .Lloopback_triple:
   1478     ldrh  r3, [r2, #2]!
   1479     ldrh  r4, [r1, #2]!
   1480     ldrh  r7, [r2, #2]!
   1481     ldrh  r8, [r1, #2]!
   1482     ldrh  r9, [r2, #2]!
   1483     ldrh  r12,[r1, #2]!
   1484     subs  r0, r3, r4
   1485     it    eq
   1486     subseq  r0, r7, r8
   1487     it    eq
   1488     subseq  r0, r9, r12
   1489     bne   .Ldone
   1490     subs  r10, #3
   1491     bge   .Lloopback_triple
   1492 
   1493 .Ldo_remainder:
   1494     adds  r10, #3
   1495     beq   .Lreturn_diff
   1496 
   1497 .Lloopback_single:
   1498     ldrh  r3, [r2, #2]!
   1499     ldrh  r4, [r1, #2]!
   1500     subs  r0, r3, r4
   1501     bne   .Ldone
   1502     subs  r10, #1
   1503     bne   .Lloopback_single
   1504 
   1505 .Lreturn_diff:
   1506     mov   r0, r11
   1507     pop   {r4, r7-r12, pc}
   1508 
   1509 .Ldo_remainder2:
   1510     adds  r10, #2
   1511     bne   .Lloopback_single
   1512     mov   r0, r11
   1513     pop   {r4, r7-r12, pc}
   1514 
   1515     /* Long string case */
   1516 .Ldo_memcmp16:
   1517     mov   r7, r11
   1518     add   r0, r2, #2
   1519     add   r1, r1, #2
   1520     mov   r2, r10
   1521     bl    __memcmp16
   1522     cmp   r0, #0
   1523     it    eq
   1524     moveq r0, r7
   1525 .Ldone:
   1526     pop   {r4, r7-r12, pc}
   1527 END art_quick_string_compareto
   1528