Home | History | Annotate | Download | only in x86
      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #include "asm_support_x86.S"
     18 #include "interpreter/cfi_asm_support.h"
     19 
     20 #include "arch/quick_alloc_entrypoints.S"
     21 
     22 // For x86, the CFA is esp+4, the address above the pushed return address on the stack.
     23 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves).
     * Spills EDI/ESI/EBP, reserves alignment padding, pushes the runtime's
     * save-all method as the frame's ArtMethod*, and publishes ESP as the
     * thread's top quick frame. Clobbers got_reg and temp_reg.
     */
MACRO2(SETUP_SAVE_ALL_CALLEE_SAVES_FRAME, got_reg, temp_reg)
    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
    CFI_ADJUST_CFA_OFFSET(12)
    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
    // Load Runtime::instance_ from GOT.
    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
    // Push save all callee-save method.
    pushl RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 3*4 + 16 + 4)
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(X86) size not as expected."
#endif
END_MACRO
     49 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly).
     * Clobbers got_reg and temp_reg.
     */
MACRO2(SETUP_SAVE_REFS_ONLY_FRAME, got_reg, temp_reg)
    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
    CFI_ADJUST_CFA_OFFSET(12)
    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
    // Load Runtime::instance_ from GOT.
    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
    // Push save refs-only callee-save method.
    pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET

    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_REFS_ONLY != 3*4 + 16 + 4)
#error "FRAME_SIZE_SAVE_REFS_ONLY(X86) size not as expected."
#endif
END_MACRO
     76 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly)
     * and preserves the value of got_reg at entry.
     * got_reg is spilled into one of the padding slots and reloaded at the end;
     * temp_reg is clobbered.
     */
MACRO2(SETUP_SAVE_REFS_ONLY_FRAME_PRESERVE_GOT_REG, got_reg, temp_reg)
    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    PUSH RAW_VAR(got_reg)  // Save got_reg
    subl MACRO_LITERAL(8), %esp  // Grow stack by 2 words.
    CFI_ADJUST_CFA_OFFSET(8)

    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
    // Load Runtime::instance_ from GOT.
    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
    // Push save refs-only callee-save method.
    pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
    // Restore got_reg.
    // 12 = method* (4) + 2 padding words (8); that lands on the spilled got_reg slot.
    movl 12(%esp), REG_VAR(got_reg)
    CFI_RESTORE(RAW_VAR(got_reg))

    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_REFS_ONLY != 3*4 + 16 + 4)
#error "FRAME_SIZE_SAVE_REFS_ONLY(X86) size not as expected."
#endif
END_MACRO
    109 
    /*
     * Macro that tears down the kSaveRefsOnly callee save frame:
     * drops the method* and padding (16 bytes), then restores the
     * callee-saved registers spilled by SETUP_SAVE_REFS_ONLY_FRAME.
     */
MACRO0(RESTORE_SAVE_REFS_ONLY_FRAME)
    addl MACRO_LITERAL(16), %esp  // Unwind stack up to saved values
    CFI_ADJUST_CFA_OFFSET(-16)
    POP ebp  // Restore callee saves (ebx is saved/restored by the upcall)
    POP esi
    POP edi
END_MACRO
    117 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs).
     * Spills callee saves, the GPR argument registers and the XMM argument
     * registers, then pushes the runtime's refs-and-args method as the frame's
     * ArtMethod*. Clobbers got_reg and temp_reg.
     */
MACRO2(SETUP_SAVE_REFS_AND_ARGS_FRAME, got_reg, temp_reg)
    PUSH edi  // Save callee saves
    PUSH esi
    PUSH ebp
    PUSH ebx  // Save args
    PUSH edx
    PUSH ecx
    // Create space for FPR args.
    subl MACRO_LITERAL(4 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(4 * 8)
    // Save FPRs.
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)

    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
    // Load Runtime::instance_ from GOT.
    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
    // Push save refs-and-args callee-save method.
    pushl RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET

    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 7*4 + 4*8 + 4)
#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(X86) size not as expected."
#endif
END_MACRO
    154 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs) where the method is passed in EAX.
     * Unlike SETUP_SAVE_REFS_AND_ARGS_FRAME this does not touch the GOT or the
     * Runtime instance; EAX itself supplies the ArtMethod* at the frame bottom.
     */
MACRO0(SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX)
    // Save callee and GPR args, mixed together to agree with core spills bitmap.
    PUSH edi  // Save callee saves
    PUSH esi
    PUSH ebp
    PUSH ebx  // Save args
    PUSH edx
    PUSH ecx

    // Create space for FPR args.
    subl MACRO_LITERAL(32), %esp
    CFI_ADJUST_CFA_OFFSET(32)

    // Save FPRs.
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)

    PUSH eax  // Store the ArtMethod reference at the bottom of the stack.
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
END_MACRO
    182 
    /*
     * Macro that tears down the kSaveRefsAndArgs frame: reloads the XMM arg
     * registers, removes the FPR area plus the method*/EAX slot, and restores
     * the GPR args (except EAX, which typically holds a result) and callee saves.
     */
MACRO0(RESTORE_SAVE_REFS_AND_ARGS_FRAME)
    // Restore FPRs. EAX is still on the stack.
    movsd 4(%esp), %xmm0
    movsd 12(%esp), %xmm1
    movsd 20(%esp), %xmm2
    movsd 28(%esp), %xmm3

    addl MACRO_LITERAL(36), %esp  // Remove FPRs and EAX.
    CFI_ADJUST_CFA_OFFSET(-36)

    POP ecx                       // Restore args except eax
    POP edx
    POP ebx
    POP ebp                       // Restore callee saves
    POP esi
    POP edi
END_MACRO
    200 
// Restore register and jump to routine
// Inputs:  EDI contains pointer to code.
// Notes: Need to pop EAX too (restores Method*)
// The final xchgl leaves the code pointer as the only extra value on the
// stack so that `ret` acts as a tail call into it.
MACRO0(RESTORE_SAVE_REFS_AND_ARGS_FRAME_AND_JUMP)
    POP eax  // Restore Method*

    // Restore FPRs.
    movsd 0(%esp), %xmm0
    movsd 8(%esp), %xmm1
    movsd 16(%esp), %xmm2
    movsd 24(%esp), %xmm3

    addl MACRO_LITERAL(32), %esp  // Remove FPRs.
    CFI_ADJUST_CFA_OFFSET(-32)

    POP ecx  // Restore args except eax
    POP edx
    POP ebx
    POP ebp  // Restore callee saves
    POP esi
    xchgl 0(%esp),%edi // restore EDI and place code pointer as only value on stack
    ret
END_MACRO
    224 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
     * when EDI and ESI are already saved.
     * Spills the remaining GPRs and all eight XMM registers, then pushes the
     * runtime method found at runtime_method_offset. Clobbers got_reg/temp_reg.
     */
MACRO3(SETUP_SAVE_EVERYTHING_FRAME_EDI_ESI_SAVED, got_reg, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
    // Save core registers from highest to lowest to agree with core spills bitmap.
    // EDI and ESI, or at least placeholders for them, are already on the stack.
    PUSH ebp
    PUSH ebx
    PUSH edx
    PUSH ecx
    PUSH eax
    // Create space for FPR registers and stack alignment padding.
    subl MACRO_LITERAL(12 + 8 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(12 + 8 * 8)
    // Save FPRs. The 12 bytes below the FPR area are alignment padding plus
    // room for the method*.
    movsd %xmm0, 12(%esp)
    movsd %xmm1, 20(%esp)
    movsd %xmm2, 28(%esp)
    movsd %xmm3, 36(%esp)
    movsd %xmm4, 44(%esp)
    movsd %xmm5, 52(%esp)
    movsd %xmm6, 60(%esp)
    movsd %xmm7, 68(%esp)

    SETUP_GOT_NOSAVE RAW_VAR(got_reg)
    // Load Runtime::instance_ from GOT.
    movl SYMBOL(_ZN3art7Runtime9instance_E)@GOT(REG_VAR(got_reg)), REG_VAR(temp_reg)
    movl (REG_VAR(temp_reg)), REG_VAR(temp_reg)
    // Push save everything callee-save method.
    pushl \runtime_method_offset(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET

    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_EVERYTHING != 7*4 + 8*8 + 12 + 4 + 4)
#error "FRAME_SIZE_SAVE_EVERYTHING(X86) size not as expected."
#endif
END_MACRO
    267 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
     * when EDI is already saved.
     * Pushes ESI and delegates the rest to SETUP_SAVE_EVERYTHING_FRAME_EDI_ESI_SAVED.
     */
MACRO3(SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED, got_reg, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
    // Save core registers from highest to lowest to agree with core spills bitmap.
    // EDI, or at least a placeholder for it, is already on the stack.
    PUSH esi
    SETUP_SAVE_EVERYTHING_FRAME_EDI_ESI_SAVED RAW_VAR(got_reg), RAW_VAR(temp_reg), \runtime_method_offset
END_MACRO
    279 
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveEverything).
     * Pushes EDI and delegates to SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED.
     */
MACRO3(SETUP_SAVE_EVERYTHING_FRAME, got_reg, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
    PUSH edi
    SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED RAW_VAR(got_reg), RAW_VAR(temp_reg), \runtime_method_offset
END_MACRO
    288 
    /*
     * Reloads all eight XMM registers from a kSaveEverything frame.
     * Offsets are 4 (method*) + 12 (padding) past ESP, matching the layout
     * written by SETUP_SAVE_EVERYTHING_FRAME_EDI_ESI_SAVED.
     * (Macro name has a historical transposition: "FRPS" = FPRs.)
     */
MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_FRPS)
    // Restore FPRs. Method and padding is still on the stack.
    movsd 16(%esp), %xmm0
    movsd 24(%esp), %xmm1
    movsd 32(%esp), %xmm2
    movsd 40(%esp), %xmm3
    movsd 48(%esp), %xmm4
    movsd 56(%esp), %xmm5
    movsd 64(%esp), %xmm6
    movsd 72(%esp), %xmm7
END_MACRO
    300 
    /*
     * Pops the core registers of a kSaveEverything frame in reverse spill
     * order, leaving EAX untouched (callers decide whether to pop or skip it).
     */
MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX)
    // Restore core registers (except eax).
    POP ecx
    POP edx
    POP ebx
    POP ebp
    POP esi
    POP edi
END_MACRO
    310 
    /*
     * Full teardown of a kSaveEverything frame, restoring every register
     * including EAX.
     */
MACRO0(RESTORE_SAVE_EVERYTHING_FRAME)
    RESTORE_SAVE_EVERYTHING_FRAME_FRPS

    // Remove save everything callee save method, stack alignment padding and FPRs.
    addl MACRO_LITERAL(16 + 8 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(-(16 + 8 * 8))

    POP eax
    RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX
END_MACRO
    321 
    /*
     * Teardown of a kSaveEverything frame that preserves the current value of
     * EAX (e.g. a return value): the spilled EAX slot is discarded rather
     * than popped.
     */
MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX)
    RESTORE_SAVE_EVERYTHING_FRAME_FRPS

    // Remove save everything callee save method, stack alignment padding and FPRs, skip EAX.
    addl MACRO_LITERAL(16 + 8 * 8 + 4), %esp
    CFI_ADJUST_CFA_OFFSET(-(16 + 8 * 8 + 4))

    RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX
END_MACRO
    331 
    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_ when the runtime method frame is ready.
     * Does not return (the callee performs a long jump).
     */
MACRO0(DELIVER_PENDING_EXCEPTION_FRAME_READY)
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp               // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*)
    UNREACHABLE
END_MACRO
    345 
    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     * Sets up a kSaveAllCalleeSaves frame first; does not return.
     */
MACRO0(DELIVER_PENDING_EXCEPTION)
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save callee saves for throw
    DELIVER_PENDING_EXCEPTION_FRAME_READY
END_MACRO
    354 
    /*
     * Defines a no-argument throw entrypoint named c_name that sets up a
     * kSaveAllCalleeSaves frame and calls cxx_name(Thread*). Never returns.
     */
MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp               // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call CALLVAR(cxx_name)                     // cxx_name(Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO
    367 
    /*
     * Defines a no-argument throw entrypoint named c_name that sets up a
     * kSaveEverything frame and calls cxx_name(Thread*). Never returns.
     */
MACRO2(NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx       // save all registers as basis for long jump context
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp               // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call CALLVAR(cxx_name)                     // cxx_name(Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO
    380 
    /*
     * Defines a one-argument throw entrypoint named c_name that sets up a
     * kSaveAllCalleeSaves frame and calls cxx_name(arg1 in EAX, Thread*).
     * Never returns.
     */
MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                   // pass arg1
    call CALLVAR(cxx_name)                     // cxx_name(arg1, Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO
    394 
    /*
     * Defines a two-argument throw entrypoint named c_name that sets up a
     * kSaveEverything frame and calls cxx_name(arg1 in EAX, arg2 in ECX, Thread*).
     * Never returns. The first PUSH of eax is padding only, to keep the
     * outgoing-call stack 16-byte aligned.
     */
MACRO2(TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx       // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                                   // alignment padding
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                   // pass arg2
    PUSH eax                                   // pass arg1
    call CALLVAR(cxx_name)                     // cxx_name(arg1, arg2, Thread*)
    UNREACHABLE
    END_FUNCTION VAR(c_name)
END_MACRO
    408 
    409     /*
    410      * Called by managed code to create and deliver a NullPointerException.
    411      */
    412 NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
    413 
    /*
     * Call installed by a signal handler to create and deliver a NullPointerException.
     * On entry the fault handler has already pushed the fault address and a
     * return address, hence the custom CFA of 2 pointers.
     * Calls artThrowNullPointerExceptionFromSignal(fault_addr, Thread*); never returns.
     */
DEFINE_FUNCTION_CUSTOM_CFA art_quick_throw_null_pointer_exception_from_signal, 2 * __SIZEOF_POINTER__
    // Fault address and return address were saved by the fault handler.
    // Save all registers as basis for long jump context; EDI will replace fault address later.
    SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED ebx, ebx
    // Retrieve fault address and save EDI.
    movl (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)(%esp), %eax
    movl %edi, (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)(%esp)
    CFI_REL_OFFSET(%edi, (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__))
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                           // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                              // pass arg1
    call SYMBOL(artThrowNullPointerExceptionFromSignal)   // (addr, self)
    UNREACHABLE
// Fix: END_FUNCTION must name the function opened by DEFINE_FUNCTION_CUSTOM_CFA
// above; the previous "art_quick_throw_null_pointer_exception" closed the
// wrong symbol, mispairing the size/CFI end markers.
END_FUNCTION art_quick_throw_null_pointer_exception_from_signal
    434 
    435     /*
    436      * Called by managed code to create and deliver an ArithmeticException.
    437      */
    438 NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_div_zero, artThrowDivZeroFromCode
    439 
    440     /*
    441      * Called by managed code to create and deliver a StackOverflowError.
    442      */
    443 NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode
    444 
    445     /*
    446      * Called by managed code, saves callee saves and then calls artThrowException
    447      * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
    448      */
    449 ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode
    450 
    451     /*
    452      * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
    453      * index, arg2 holds limit.
    454      */
    455 TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
    456 
    457     /*
    458      * Called by managed code to create and deliver a StringIndexOutOfBoundsException
    459      * as if thrown from a call to String.charAt(). Arg1 holds index, arg2 holds limit.
    460      */
    461 TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_string_bounds, artThrowStringBoundsFromCode
    462 
    463     /*
    464      * All generated callsites for interface invokes and invocation slow paths will load arguments
    465      * as usual - except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
    466      * the method_idx.  This wrapper will save arg1-arg3 and call the appropriate C helper.
    467      * NOTE: "this" is first visible argument of the target, and so can be found in arg1/r1.
    468      *
    469      * The helper will attempt to locate the target and return a 64-bit result in r0/r1 consisting
    470      * of the target Method* in r0 and method->code_ in r1.
    471      *
    472      * If unsuccessful, the helper will return null/null and there will be a pending exception in the
    473      * thread and we branch to another stub to deliver it.
    474      *
    475      * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
    476      * pointing back to the original caller.
    477      */
    /*
     * Body shared by all invoke trampolines. Sets up a kSaveRefsAndArgs frame,
     * calls cxx_name(method_idx, this, Thread*, SP), which returns the resolved
     * ArtMethod* in EAX and its code pointer in EDX, then restores the argument
     * registers and tail-calls the code pointer. If EAX is null, a pending
     * exception is delivered instead.
     */
MACRO1(INVOKE_TRAMPOLINE_BODY, cxx_name)
    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx, ebx
    movl %esp, %edx  // remember SP

    // Outgoing argument set up
    PUSH edx                      // pass SP
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                      // pass arg2
    PUSH eax                      // pass arg1
    call CALLVAR(cxx_name)        // cxx_name(arg1, arg2, Thread*, SP)
    movl %edx, %edi               // save code pointer in EDI
    addl MACRO_LITERAL(20), %esp  // Pop arguments skip eax
    CFI_ADJUST_CFA_OFFSET(-20)

    // Restore FPRs.
    movsd 0(%esp), %xmm0
    movsd 8(%esp), %xmm1
    movsd 16(%esp), %xmm2
    movsd 24(%esp), %xmm3

    // Remove space for FPR args.
    addl MACRO_LITERAL(4 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(-4 * 8)

    POP ecx  // Restore args except eax
    POP edx
    POP ebx
    POP ebp  // Restore callee saves
    POP esi
    // Swap EDI callee save with code pointer.
    xchgl %edi, (%esp)
    testl %eax, %eax              // Branch forward if exception pending (EAX == null).
    jz    1f
    // Tail call to intended method.
    ret
1:
    addl MACRO_LITERAL(4), %esp   // Pop code pointer off stack
    CFI_ADJUST_CFA_OFFSET(-4)
    DELIVER_PENDING_EXCEPTION
END_MACRO
    /*
     * Defines an invoke trampoline function c_name wrapping INVOKE_TRAMPOLINE_BODY
     * around the C++ helper cxx_name.
     */
MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    INVOKE_TRAMPOLINE_BODY RAW_VAR(cxx_name)
    END_FUNCTION VAR(c_name)
END_MACRO
    524 
    525 INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck
    526 
    527 INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
    528 INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
    529 INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
    530 INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck
    531 
    /*
     * Helper for quick invocation stub to set up XMM registers.
     * Increments shorty and arg_array and clobbers temp_char.
     * Branches to finished if it encounters the end of the shorty.
     * Scans past non-FP entries (advancing arg_array by one word, or two for
     * a long 'J') until a 'D' or 'F' is found, then loads xmm_reg from
     * arg_array and advances past the loaded value.
     */
MACRO5(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, shorty, arg_array, temp_char, finished)
1: // LOOP
    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
    je VAR(finished)                               //   goto finished
    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
    je 2f                                          //   goto FOUND_DOUBLE
    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
    je 3f                                          //   goto FOUND_FLOAT
    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
    //  Handle extra space in arg array taken by a long.
    cmpb MACRO_LITERAL(74), REG_VAR(temp_char)     // if (temp_char != 'J')
    jne 1b                                         //   goto LOOP
    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
    jmp 1b                                         // goto LOOP
2:  // FOUND_DOUBLE
    movsd (REG_VAR(arg_array)), REG_VAR(xmm_reg)
    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
    jmp 4f
3:  // FOUND_FLOAT
    movss (REG_VAR(arg_array)), REG_VAR(xmm_reg)
    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
4:
END_MACRO
    562 
    /*
     * Helper for quick invocation stub to set up GPR registers.
     * Increments shorty and arg_array, and returns the current shorty character in
     * temp_char. Branches to finished if it encounters the end of the shorty.
     * Skips over 'F' (one word) and 'D' (two words) entries; falls through with
     * temp_char holding the first non-FP character.
     */
MACRO4(SKIP_OVER_FLOATS, shorty, arg_array, temp_char, finished)
1: // LOOP:
    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
    je VAR(finished)                               //   goto finished
    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
    je 3f                                          //   goto SKIP_FLOAT
    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
    je 4f                                          //   goto SKIP_DOUBLE
    jmp 5f                                         // goto end
3:  // SKIP_FLOAT
    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
    jmp 1b                                         // goto LOOP
4:  // SKIP_DOUBLE
    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
    jmp 1b                                         // goto LOOP
5:
END_MACRO
    587 
  /*
     * Quick invocation stub (non-static).
     * On entry:
     *   [sp] = return address
     *   [sp + 4] = method pointer
     *   [sp + 8] = argument array or null for no argument methods
     *   [sp + 12] = size of argument array in bytes
     *   [sp + 16] = (managed) thread pointer
     *   [sp + 20] = JValue* result
     *   [sp + 24] = shorty
     * Marshals up to 4 FP args into XMM0-3 and up to 2 GPR args (after the
     * implicit 'this' in ECX) into EDX/EBX, copies the arg array onto a
     * 16-byte-aligned stack frame below a null method* slot, calls the
     * method's quick code, then stores the result through the JValue*
     * according to the shorty's return character.
     */
DEFINE_FUNCTION art_quick_invoke_stub
    // Save the non-volatiles.
    PUSH ebp                      // save ebp
    PUSH ebx                      // save ebx
    PUSH esi                      // save esi
    PUSH edi                      // save edi
    // Set up argument XMM registers.
    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
    addl LITERAL(1), %esi
    mov 8+16(%esp), %edi          // EDI := arg_array + 4 ; ie skip this pointer.
    addl LITERAL(4), %edi
    // Clobbers ESI, EDI, EAX.
    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished
    .balign 16
.Lxmm_setup_finished:
    mov %esp, %ebp                // copy value of stack pointer into base pointer
    CFI_DEF_CFA_REGISTER(ebp)
    mov 28(%ebp), %ebx            // get arg array size
    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
    addl LITERAL(36), %ebx
    // align frame size to 16 bytes
    andl LITERAL(0xFFFFFFF0), %ebx
    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
    subl %ebx, %esp               // reserve stack space for argument array

    movl LITERAL(0), (%esp)       // store null for method*

    // Copy arg array into stack.
    movl 28(%ebp), %ecx           // ECX = size of args
    movl 24(%ebp), %esi           // ESI = argument array
    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
    rep movsb                     // while (ecx--) { *edi++ = *esi++ }

    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
    addl LITERAL(1), %esi
    mov 24(%ebp), %edi            // EDI := arg_array
    mov 0(%edi), %ecx             // ECX := this pointer
    addl LITERAL(4), %edi         // EDI := arg_array + 4 ; ie skip this pointer.

    // Enumerate the possible cases for loading GPRS.
    // edx (and maybe ebx):
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
    je .LfirstLong
    // Must be an integer value.
    movl (%edi), %edx
    addl LITERAL(4), %edi         // arg_array++

    // Now check ebx
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
    // Must be first word of a long, or an integer. First word of long doesn't
    // go into EBX, but can be loaded there anyways, as it is harmless.
    movl (%edi), %ebx
    jmp .Lgpr_setup_finished
.LfirstLong:
    movl (%edi), %edx
    movl 4(%edi), %ebx
    // Nothing left to load.
.Lgpr_setup_finished:
    mov 20(%ebp), %eax            // move method pointer into eax
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
    mov %ebp, %esp                // restore stack pointer
    CFI_DEF_CFA_REGISTER(esp)
    POP edi                       // pop edi
    POP esi                       // pop esi
    POP ebx                       // pop ebx
    POP ebp                       // pop ebp
    mov 20(%esp), %ecx            // get result pointer
    mov %eax, (%ecx)              // store the result assuming its a long, int or Object*
    mov %edx, 4(%ecx)             // store the other half of the result
    mov 24(%esp), %edx            // get the shorty
    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
    je .Lreturn_double_quick
    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
    je .Lreturn_float_quick
    ret
.Lreturn_double_quick:
    movsd %xmm0, (%ecx)           // store the floating point result
    ret
.Lreturn_float_quick:
    movss %xmm0, (%ecx)           // store the floating point result
    ret
END_FUNCTION art_quick_invoke_stub
    685 
    686   /*
    687      * Quick invocation stub (static).
    688      * On entry:
    689      *   [sp] = return address
    690      *   [sp + 4] = method pointer
    691      *   [sp + 8] = argument array or null for no argument methods
    692      *   [sp + 12] = size of argument array in bytes
    693      *   [sp + 16] = (managed) thread pointer
    694      *   [sp + 20] = JValue* result
    695      *   [sp + 24] = shorty
    696      */
// Invocation trampoline for static methods: marshals a packed argument array
// into the managed calling convention (GPRs ECX/EDX/EBX + XMM0-3, remainder
// on the stack) and calls the method's quick code.  After the four PUSHes
// below the incoming stack slots are addressed off EBP as:
//   20(%ebp) = method*, 24(%ebp) = arg_array, 28(%ebp) = size in bytes,
//   36(%ebp) = JValue* result, 40(%ebp) = shorty.
DEFINE_FUNCTION art_quick_invoke_static_stub
    // Save the non-volatiles.
    PUSH ebp                      // save ebp
    PUSH ebx                      // save ebx
    PUSH esi                      // save esi
    PUSH edi                      // save edi
    // Set up argument XMM registers from the float/double entries of the shorty.
    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
    addl LITERAL(1), %esi
    mov 8+16(%esp), %edi          // EDI := arg_array
    // Clobbers ESI, EDI, EAX.
    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished2
    .balign 16
.Lxmm_setup_finished2:
    mov %esp, %ebp                // copy value of stack pointer into base pointer
    CFI_DEF_CFA_REGISTER(ebp)
    mov 28(%ebp), %ebx            // get arg array size
    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
    addl LITERAL(36), %ebx
    // align frame size to 16 bytes
    andl LITERAL(0xFFFFFFF0), %ebx
    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
    subl %ebx, %esp               // reserve stack space for argument array

    movl LITERAL(0), (%esp)       // store null for method*

    // Copy arg array into stack.
    movl 28(%ebp), %ecx           // ECX = size of args
    movl 24(%ebp), %esi           // ESI = argument array
    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
    rep movsb                     // while (ecx--) { *edi++ = *esi++ }

    // Re-walk the shorty to place the leading non-float args in GPRs as well.
    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
    addl LITERAL(1), %esi
    mov 24(%ebp), %edi            // EDI := arg_array

    // Enumerate the possible cases for loading GPRS.
    // ecx (and maybe edx)
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
    je .LfirstLong2
    // Must be an integer value.  Load into ECX.
    movl (%edi), %ecx
    addl LITERAL(4), %edi         // arg_array++

    // Now check edx (and maybe ebx).
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
    je .LSecondLong2
    // Must be an integer.  Load into EDX.
    movl (%edi), %edx
    addl LITERAL(4), %edi         // arg_array++

    // Is there anything for ebx?
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
    // Must be first word of a long, or an integer. First word of long doesn't
    // go into EBX, but can be loaded there anyways, as it is harmless.
    movl (%edi), %ebx
    jmp .Lgpr_setup_finished2
.LSecondLong2:
    // EDX:EBX is long.  That is all.
    movl (%edi), %edx
    movl 4(%edi), %ebx
    jmp .Lgpr_setup_finished2
.LfirstLong2:
    // ECX:EDX is a long
    movl (%edi), %ecx
    movl 4(%edi), %edx
    addl LITERAL(8), %edi         // arg_array += 2

    // Anything for EBX?
    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
    // Must be first word of a long, or an integer. First word of long doesn't
    // go into EBX, but can be loaded there anyways, as it is harmless.
    movl (%edi), %ebx
    jmp .Lgpr_setup_finished2
    // Nothing left to load.
.Lgpr_setup_finished2:
    mov 20(%ebp), %eax            // move method pointer into eax
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
    mov %ebp, %esp                // restore stack pointer
    CFI_DEF_CFA_REGISTER(esp)
    POP edi                       // pop edi
    POP esi                       // pop esi
    POP ebx                       // pop ebx
    POP ebp                       // pop ebp
    // Write the return value; EAX(:EDX) is stored unconditionally and then
    // overwritten below if the shorty says the result is floating point.
    mov 20(%esp), %ecx            // get result pointer
    mov %eax, (%ecx)              // store the result assuming its a long, int or Object*
    mov %edx, 4(%ecx)             // store the other half of the result
    mov 24(%esp), %edx            // get the shorty
    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
    je .Lreturn_double_quick2
    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
    je .Lreturn_float_quick2
    ret
.Lreturn_double_quick2:
    movsd %xmm0, (%ecx)           // store the floating point result
    ret
.Lreturn_float_quick2:
    movss %xmm0, (%ecx)           // store the floating point result
    ret
END_FUNCTION art_quick_invoke_static_stub
    802 
// Generates a stub that calls 'cxx_name(arg1, Thread*)' under a SaveRefsOnly
// frame; 'return_macro' decides how to return or deliver a pending exception.
// The 8-byte padding keeps the outgoing 16 bytes of arguments 16-byte aligned.
MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx         // save ref containing registers for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                  // push padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
    819 
// Generates a stub that calls 'cxx_name(arg1, arg2, Thread*)' under a
// SaveRefsOnly frame.  One padding push keeps the 16 outgoing bytes aligned.
MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx         // save ref containing registers for GC
    // Outgoing argument set up
    PUSH eax                                     // push padding
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
    836 
// Generates a stub that calls 'cxx_name(arg1, arg2, arg3, Thread*)' under a
// SaveRefsOnly frame.  Four 4-byte pushes: no extra padding needed.
MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx         // save ref containing registers for GC
    // Outgoing argument set up
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH edx                                     // pass arg3
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, Thread*)
    addl MACRO_LITERAL(16), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
    853 
// Generates a stub that calls 'cxx_name(arg1, arg2, arg3, arg4, Thread*)'
// under a SaveRefsOnly frame.  12 bytes of padding + 20 bytes of pushes
// keep the total (32) a multiple of 16.
MACRO3(FOUR_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME_PRESERVE_GOT_REG ebx, ebx  // save ref containing registers for GC

    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ebx                                     // pass arg4
    PUSH edx                                     // pass arg3
    PUSH ecx                                     // pass arg2
    PUSH eax                                     // pass arg1
    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, arg4, Thread*)
    addl MACRO_LITERAL(32), %esp                 // pop arguments
    CFI_ADJUST_CFA_OFFSET(-32)
    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
    CALL_MACRO(return_macro)                     // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
    874 
// Like ONE_ARG_DOWNCALL, for reference-returning field/object helpers:
// calls 'cxx_name(arg1, Thread*)' under a SaveRefsOnly frame.
MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx               // save ref containing registers for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                       // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                          // pass arg1
    call CALLVAR(cxx_name)                            // cxx_name(arg1, Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                      // restore frame up to return address
    CALL_MACRO(return_macro)                          // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
    891 
// Like TWO_ARG_DOWNCALL, for reference-returning field/object helpers:
// calls 'cxx_name(arg1, arg2, Thread*)' under a SaveRefsOnly frame.
MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx               // save ref containing registers for GC
    // Outgoing argument set up
    PUSH eax                                          // alignment padding
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                          // pass arg2
    PUSH eax                                          // pass arg1
    call CALLVAR(cxx_name)                            // cxx_name(arg1, arg2, referrer, Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                      // restore frame up to return address
    CALL_MACRO(return_macro)                          // return or deliver exception
    END_FUNCTION VAR(c_name)
END_MACRO
    908 
    909 MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    910     DEFINE_FUNCTION VAR(c_name)
    911     SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx               // save ref containing registers for GC
    912     // Outgoing argument set up
    913     pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    914     CFI_ADJUST_CFA_OFFSET(4)
    915     PUSH edx                                          // pass arg3
    916     PUSH ecx                                          // pass arg2
    917     PUSH eax                                          // pass arg1
    918     call CALLVAR(cxx_name)                            // cxx_name(arg1, arg2, arg3, Thread*)
    919     addl LITERAL(16), %esp                            // pop arguments
    920     CFI_ADJUST_CFA_OFFSET(-32)
    921     RESTORE_SAVE_REFS_ONLY_FRAME                      // restore frame up to return address
    922     CALL_MACRO(return_macro)                          // return or deliver exception
    923     END_FUNCTION VAR(c_name)
    924 END_MACRO
    925 
    926 // Macro for string and type resolution and initialization.
// Calls 'cxx_name(arg1, Thread*)' under a SaveEverything frame so all
// registers are visible to the GC; used by string/type resolution stubs.
// A null result means an exception (e.g. OOME) is pending and is delivered
// on the already-set-up frame.
MACRO3(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
    DEFINE_FUNCTION VAR(c_name)
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx, \runtime_method_offset  // save ref containing registers for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(8), %esp                       // push padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                          // pass arg1
    call CALLVAR(cxx_name)                            // cxx_name(arg1, Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    testl %eax, %eax                                  // If result is null, deliver the OOME.
    jz 1f
    CFI_REMEMBER_STATE                                // snapshot CFI before tearing down the frame
    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX            // restore frame up to return address
    ret                                               // return
    CFI_RESTORE_STATE                                 // back to the full-frame CFI for the slow path
    CFI_DEF_CFA(esp, FRAME_SIZE_SAVE_EVERYTHING)      // workaround for clang bug: 31975598
1:
    DELIVER_PENDING_EXCEPTION_FRAME_READY
    END_FUNCTION VAR(c_name)
END_MACRO
    950 
// Variant used on the class-initialization path: same as above but the
// SaveEverything frame references the for-clinit runtime method.
MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT, c_name, cxx_name)
    ONE_ARG_SAVE_EVERYTHING_DOWNCALL \c_name, \cxx_name, RUNTIME_SAVE_EVERYTHING_FOR_CLINIT_METHOD_OFFSET
END_MACRO
    954 
// Return-path helper: a non-null EAX result is returned as-is; a null
// result means the callee raised, so deliver the pending exception.
MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
    testl %eax, %eax               // eax == 0 ?
    jz  1f                         // if eax == 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
    962 
// Return-path helper for callees that return 0 on success: return on zero,
// otherwise deliver the pending exception.
MACRO0(RETURN_IF_EAX_ZERO)
    testl %eax, %eax               // eax == 0 ?
    jnz  1f                        // if eax != 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
    970 
// Return-path helper that inspects Thread::Current()->exception_ directly:
// return if no exception is pending, otherwise deliver it.
MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
    cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ?
    jne 1f                                            // if exception field != 0 goto 1
    ret                                               // return
1:                                                    // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
    978 
// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS

// Comment out allocators that have x86 specific asm.
// (The commented-out invocations below are implemented by hand further down
// in this file; only the generic ones are macro-generated here.)
// Region TLAB:
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_region_tlab, RegionTLAB)
// Normal TLAB:
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_tlab, TLAB)
   1007 
   1008 // A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc).
   1009 MACRO2(ART_QUICK_ALLOC_OBJECT_ROSALLOC, c_name, cxx_name)
   1010     DEFINE_FUNCTION VAR(c_name)
   1011     // Fast path rosalloc allocation.
   1012     // eax: type/return value
   1013     // ecx, ebx, edx: free
   1014     movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
   1015                                                         // Check if the thread local allocation
   1016                                                         // stack has room
   1017     movl THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx), %ecx
   1018     cmpl THREAD_LOCAL_ALLOC_STACK_END_OFFSET(%ebx), %ecx
   1019     jae  .Lslow_path\c_name
   1020 
   1021     movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%eax), %ecx  // Load the object size (ecx)
   1022                                                         // Check if the size is for a thread
   1023                                                         // local allocation. Also does the
   1024                                                         // finalizable and initialization check.
   1025     cmpl LITERAL(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), %ecx
   1026     ja   .Lslow_path\c_name
   1027     shrl LITERAL(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT), %ecx // Calculate the rosalloc bracket index
   1028                                                             // from object size.
   1029                                                         // Load thread local rosalloc run (ebx)
   1030                                                         // Subtract __SIZEOF_POINTER__ to subtract
   1031                                                         // one from edi as there is no 0 byte run
   1032                                                         // and the size is already aligned.
   1033     movl (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)(%ebx, %ecx, __SIZEOF_POINTER__), %ebx
   1034                                                         // Load free_list head (edi),
   1035                                                         // this will be the return value.
   1036     movl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx), %ecx
   1037     jecxz   .Lslow_path\c_name
   1038                                                         // Point of no slow path. Won't go to
   1039                                                         // the slow path from here on.
   1040                                                         // Load the next pointer of the head
   1041                                                         // and update head of free list with
   1042                                                         // next pointer
   1043     movl ROSALLOC_SLOT_NEXT_OFFSET(%ecx), %edx
   1044     movl %edx, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx)
   1045                                                         // Decrement size of free list by 1
   1046     decl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)(%ebx)
   1047                                                         // Store the class pointer in the
   1048                                                         // header. This also overwrites the
   1049                                                         // next pointer. The offsets are
   1050                                                         // asserted to match.
   1051 #if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
   1052 #error "Class pointer needs to overwrite next pointer."
   1053 #endif
   1054     POISON_HEAP_REF eax
   1055     movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%ecx)
   1056     movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
   1057                                                         // Push the new object onto the thread
   1058                                                         // local allocation stack and
   1059                                                         // increment the thread local
   1060                                                         // allocation stack top.
   1061     movl THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx), %eax
   1062     movl %ecx, (%eax)
   1063     addl LITERAL(COMPRESSED_REFERENCE_SIZE), %eax
   1064     movl %eax, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx)
   1065                                                         // No fence needed for x86.
   1066     movl %ecx, %eax                                     // Move object to return register
   1067     ret
   1068 .Lslow_path\c_name:
   1069     SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx          // save ref containing registers for GC
   1070     // Outgoing argument set up
   1071     subl LITERAL(8), %esp                       // alignment padding
   1072     pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
   1073     CFI_ADJUST_CFA_OFFSET(4)
   1074     PUSH eax
   1075     call SYMBOL(artAllocObjectFromCodeResolvedRosAlloc)  // cxx_name(arg0, Thread*)
   1076     addl LITERAL(16), %esp                       // pop arguments
   1077     CFI_ADJUST_CFA_OFFSET(-16)
   1078     RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
   1079     RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER      // return or deliver exception
   1080     END_FUNCTION VAR(c_name)
   1081 END_MACRO
   1082 
// Instantiate the RosAlloc stubs for the resolved-class and the
// initialized-class allocation paths.
ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_resolved_rosalloc, artAllocObjectFromCodeResolvedRosAlloc
ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_initialized_rosalloc, artAllocObjectFromCodeInitializedRosAlloc
   1085 
   1086 // The common fast path code for art_quick_alloc_object_resolved/initialized_tlab
   1087 // and art_quick_alloc_object_resolved/initialized_region_tlab.
   1088 //
   1089 // EAX: type/return_value
// Bump-pointer TLAB allocation fast path.  Expects EDI to have been pushed
// by the caller (see ART_QUICK_ALLOC_OBJECT_TLAB); pops it before returning.
// EAX: class in, allocated object out.  Clobbers EBX, ECX, EDX.
MACRO1(ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH, slowPathLabel)
    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
    movl THREAD_LOCAL_END_OFFSET(%ebx), %edi            // Load thread_local_end.
    subl THREAD_LOCAL_POS_OFFSET(%ebx), %edi            // Compute the remaining buffer size.
    movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%eax), %ecx  // Load the object size.
    cmpl %edi, %ecx                                     // Check if it fits.
    ja   VAR(slowPathLabel)
    movl THREAD_LOCAL_POS_OFFSET(%ebx), %edx            // Load thread_local_pos
                                                        // as allocated object.
    addl %edx, %ecx                                     // Add the object size.
    movl %ecx, THREAD_LOCAL_POS_OFFSET(%ebx)            // Update thread_local_pos.
    incl THREAD_LOCAL_OBJECTS_OFFSET(%ebx)              // Increase thread_local_objects.
                                                        // Store the class pointer in the header.
                                                        // No fence needed for x86.
    POISON_HEAP_REF eax
    movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%edx)
    movl %edx, %eax                                     // Return the new object.
    POP edi
    ret                                                 // Fast path succeeded.
END_MACRO
   1110 
   1111 // The common slow path code for art_quick_alloc_object_resolved/initialized_tlab
   1112 // and art_quick_alloc_object_resolved/initialized_region_tlab.
// TLAB allocation slow path: undo the fast path's EDI push, then call
// 'cxx_name(klass, Thread*)' under a SaveRefsOnly frame.
MACRO1(ALLOC_OBJECT_RESOLVED_TLAB_SLOW_PATH, cxx_name)
    POP edi                                             // Balance the fast path's PUSH edi.
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx                 // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                               // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET                        // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                                            // pass the class
    call CALLVAR(cxx_name)                              // cxx_name(arg0, Thread*)
    addl LITERAL(16), %esp
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                        // restore frame up to return address
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER             // return or deliver exception
END_MACRO
   1128 
// Generator combining the TLAB fast path with the slow-path downcall;
// used for both plain TLAB and region TLAB object allocation stubs.
MACRO2(ART_QUICK_ALLOC_OBJECT_TLAB, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name)
    // Fast path tlab allocation.
    // EAX: type
    // EBX, ECX, EDX: free.
    PUSH edi                                            // Fast/slow path both pop this.
    ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH .Lslow_path\c_name
.Lslow_path\c_name:
    ALLOC_OBJECT_RESOLVED_TLAB_SLOW_PATH RAW_VAR(cxx_name)
    END_FUNCTION VAR(c_name)
END_MACRO
   1140 
// Instantiate the TLAB and region-TLAB object allocation stubs for the
// resolved-class and initialized-class paths.
ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_tlab, artAllocObjectFromCodeResolvedTLAB
ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_tlab, artAllocObjectFromCodeInitializedTLAB
ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_region_tlab, artAllocObjectFromCodeResolvedRegionTLAB
ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_region_tlab, artAllocObjectFromCodeInitializedRegionTLAB
   1145 
   1146 // The fast path code for art_quick_alloc_array_region_tlab.
   1147 // Inputs: EAX: the class, ECX: int32_t component_count, EDX: total_size
   1148 // Free temp: EBX
   1149 // Output: EAX: return value.
// Array TLAB allocation fast path; EDX holds the (unaligned) total byte
// size computed by a COMPUTE_ARRAY_SIZE_* macro.  Expects EDI pushed by the
// caller; pops it before returning.
MACRO1(ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE, slowPathLabel)
    mov %fs:THREAD_SELF_OFFSET, %ebx                          // ebx = thread
    // Mask out the unaligned part to make sure we are 8 byte aligned.
    andl LITERAL(OBJECT_ALIGNMENT_MASK_TOGGLED), %edx
    movl THREAD_LOCAL_END_OFFSET(%ebx), %edi
    subl THREAD_LOCAL_POS_OFFSET(%ebx), %edi                  // EDI = remaining TLAB bytes.
    cmpl %edi, %edx                                           // Check if it fits.
    ja   RAW_VAR(slowPathLabel)
    movl THREAD_LOCAL_POS_OFFSET(%ebx), %edi                  // EDI = the new array.
    addl %edi, %edx                                            // Add the object size.
    movl %edx, THREAD_LOCAL_POS_OFFSET(%ebx)                   // Update thread_local_pos_
    addl LITERAL(1), THREAD_LOCAL_OBJECTS_OFFSET(%ebx)         // Increase thread_local_objects.
                                                               // Store the class pointer in the
                                                               // header.
                                                               // No fence needed for x86.
    POISON_HEAP_REF eax
    movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%edi)
    movl %ecx, MIRROR_ARRAY_LENGTH_OFFSET(%edi)                // Store the array length.
    movl %edi, %eax
    POP edi
    ret                                                        // Fast path succeeded.
END_MACRO
   1172 
// Computes the total allocation size in EDX for an array whose component
// size is not known at compile time, reading it from the class.
// EAX: klass, ECX: component count.  Deliberately unreachable (int3).
MACRO1(COMPUTE_ARRAY_SIZE_UNKNOWN, slow_path)
    // We should never enter here. Code is provided for reference.
    int3
    // Possibly a large object, go slow.
    // Also does negative array size check.
    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_WIDE_ARRAY_DATA_OFFSET) / 8), %ecx
    ja RAW_VAR(slow_path)
    PUSH ecx
    movl %ecx, %edx
    movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%eax), %ecx        // Load component type.
    UNPOISON_HEAP_REF ecx
    movl MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET(%ecx), %ecx // Load primitive type.
    shr MACRO_LITERAL(PRIMITIVE_TYPE_SIZE_SHIFT_SHIFT), %ecx        // Get component size shift.
    sall %cl, %edx                                              // Calculate array count shifted.
    // Add array header + alignment rounding.
    add MACRO_LITERAL(MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK), %edx
    // Add 4 extra bytes if we are doing a long array.
    add MACRO_LITERAL(1), %ecx
    and MACRO_LITERAL(4), %ecx
#if MIRROR_WIDE_ARRAY_DATA_OFFSET != MIRROR_INT_ARRAY_DATA_OFFSET + 4
#error Long array data offset must be 4 greater than int array data offset.
#endif
    addl %ecx, %edx
    POP ecx
END_MACRO
   1198 
// Computes total allocation size in EDX for a 1-byte-component array
// (header + count + alignment slack); jumps to slow_path for large or
// negative counts (the unsigned compare catches both).
MACRO1(COMPUTE_ARRAY_SIZE_8, slow_path)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    // Possibly a large object, go slow.
    // Also does negative array size check.
    cmpl LITERAL(MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET), %ecx
    ja RAW_VAR(slow_path)
    // Add array header + alignment rounding.
    leal (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)(%ecx), %edx
END_MACRO
   1208 
MACRO1(COMPUTE_ARRAY_SIZE_16, slow_path)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    // Out: EDX = component_count * 2 + array header + alignment rounding.
    //      Computed as (count + (header + mask) / 2) << 1 so a single LEA and
    //      shift suffice; the low bit of (header + mask) is dropped by the
    //      division — NOTE(review): assumed harmless because the final size is
    //      alignment-rounded downstream; confirm in the TLAB fast path.
    // Possibly a large object, go slow.
    // Also does negative array size check (unsigned `ja` on the count).
    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET) / 2), %ecx
    ja RAW_VAR(slow_path)
    // Add array header + alignment rounding.
    leal ((MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 2)(%ecx), %edx
    sall MACRO_LITERAL(1), %edx
END_MACRO
   1219 
MACRO1(COMPUTE_ARRAY_SIZE_32, slow_path)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    // Out: EDX = component_count * 4 + array header + alignment rounding,
    //      computed as (count + (header + mask) / 4) << 2.
    // Possibly a large object, go slow.
    // Also does negative array size check (unsigned `ja` on the count).
    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET) / 4), %ecx
    ja RAW_VAR(slow_path)
    // Add array header + alignment rounding.
    leal ((MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 4)(%ecx), %edx
    sall MACRO_LITERAL(2), %edx
END_MACRO
   1230 
MACRO1(COMPUTE_ARRAY_SIZE_64, slow_path)
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    // Out: EDX = component_count * 8 + wide-array header + alignment rounding,
    //      computed as (count + (header + mask) / 8) << 3. Uses the wide
    //      (8-byte-aligned) array data offset, unlike the narrower variants.
    // Possibly a large object, go slow.
    // Also does negative array size check (unsigned `ja` on the count).
    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_WIDE_ARRAY_DATA_OFFSET) / 8), %ecx
    ja RAW_VAR(slow_path)
    // Add array header + alignment rounding.
    leal ((MIRROR_WIDE_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 8)(%ecx), %edx
    sall MACRO_LITERAL(3), %edx
END_MACRO
   1241 
MACRO3(GENERATE_ALLOC_ARRAY_TLAB, c_entrypoint, cxx_name, size_setup)
    DEFINE_FUNCTION VAR(c_entrypoint)
    // Generates an array-allocation entrypoint named `c_entrypoint`:
    // `size_setup` computes the allocation size (jumping to the slow path for
    // large/negative counts), the TLAB fast path tries to bump-allocate, and
    // on failure the C++ allocator `cxx_name`(klass, count, Thread*) is called
    // under a SaveRefsOnly frame.
    // EAX: mirror::Class* klass, ECX: int32_t component_count
    PUSH edi
    CALL_MACRO(size_setup) .Lslow_path\c_entrypoint
    ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE .Lslow_path\c_entrypoint
.Lslow_path\c_entrypoint:
    POP edi
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx                        // save ref containing registers for GC
    // Outgoing argument set up
    PUSH eax                                                   // alignment padding
    pushl %fs:THREAD_SELF_OFFSET                               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx
    PUSH eax
    call CALLVAR(cxx_name)                                     // cxx_name(arg0, arg1, Thread*)
    addl LITERAL(16), %esp                                     // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME                               // restore frame up to return address
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER                    // return or deliver exception
    END_FUNCTION VAR(c_entrypoint)
END_MACRO
   1264 
   1265 
// Instantiate the array-allocation entrypoints for the region-TLAB allocator:
// one stub per known component size (8/16/32/64 bit) plus the
// unknown-component-size variant.
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_8
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_16
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_32
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_64

// Same set of stubs for the plain (non-region) TLAB allocator.
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_8
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_16
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_32
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_64

// Save-everything downcalls for static-storage initialization, type
// resolution (with and without access checks) and string resolution.
ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_initialize_type, artInitializeTypeFromCode
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode

// Fill-array-data helper; returns via RETURN_IF_EAX_ZERO (0 == success).
TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
   1284 
DEFINE_FUNCTION art_quick_lock_object
    /*
     * Locks the object held in EAX. The fast path handles the unlocked case
     * (CAS the current thread id into the lock word) and recursive locking by
     * the owning thread (CAS an incremented count). Null objects, inflated
     * (monitor) locks, locks held by another thread, and thin-lock count
     * overflow all fall back to artLockObjectFromCode under a SaveRefsOnly
     * frame. ECX and EDX are used as scratch.
     */
    testl %eax, %eax                      // null check object/eax
    jz   .Lslow_lock
.Lretry_lock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
    test LITERAL(LOCK_WORD_STATE_MASK), %ecx         // test the 2 high bits.
    jne  .Lslow_lock                      // slow path if either of the two high bits are set.
    movl %ecx, %edx                       // save lock word (edx) to keep read barrier bits.
    andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %ecx  // zero the gc bits.
    test %ecx, %ecx
    jnz  .Lalready_thin                   // lock word contains a thin lock
    // unlocked case - edx: original lock word, eax: obj.
    movl %eax, %ecx                       // remember object in case of retry
    movl %edx, %eax                       // eax: lock word zero except for read barrier bits.
    movl %fs:THREAD_ID_OFFSET, %edx       // load thread id.
    or   %eax, %edx                       // edx: thread id with count of 0 + read barrier bits.
    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
    jnz  .Llock_cmpxchg_fail              // cmpxchg failed retry
    ret
.Lalready_thin:  // edx: lock word (with high 2 bits zero and original rb bits), eax: obj.
    movl %fs:THREAD_ID_OFFSET, %ecx       // ecx := thread id
    cmpw %cx, %dx                         // do we hold the lock already?
    jne  .Lslow_lock
    movl %edx, %ecx                       // copy the lock word to check count overflow.
    andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %ecx  // zero the read barrier bits.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // increment recursion count for overflow check.
    test LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED), %ecx  // overflowed if the first gc state bit is set.
    jne  .Lslow_lock                      // count overflowed so go slow
    movl %eax, %ecx                       // save obj to use eax for cmpxchg.
    movl %edx, %eax                       // copy the lock word as the old val for cmpxchg.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx  // increment recursion count again for real.
    // update lockword, cmpxchg necessary for read barrier bits.
    lock cmpxchg  %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%ecx)  // eax: old val, edx: new val.
    jnz  .Llock_cmpxchg_fail              // cmpxchg failed retry
    ret
.Llock_cmpxchg_fail:
    // The CAS lost a race (another thread changed the lock word, e.g. the
    // read barrier state); reload and retry from scratch.
    movl  %ecx, %eax                      // restore eax
    jmp  .Lretry_lock
.Lslow_lock:
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_lock_object
   1337 
DEFINE_FUNCTION art_quick_lock_object_no_inline
    /*
     * Locks the object in EAX by calling artLockObjectFromCode directly,
     * without attempting the inline thin-lock fast path (identical to the
     * slow path of art_quick_lock_object).
     */
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_lock_object_no_inline
   1352 
   1353 
DEFINE_FUNCTION art_quick_unlock_object
    /*
     * Unlocks the object held in EAX. The fast path handles thin locks owned
     * by the current thread: a recursion count of zero clears the lock word
     * (keeping the read barrier bits), a positive count is decremented. With
     * USE_READ_BARRIER the store is a CAS so concurrent read-barrier-bit
     * updates retry; without it a plain store suffices. Null objects,
     * inflated (monitor) locks and locks owned by another thread go to
     * artUnlockObjectFromCode. ECX and EDX are used as scratch.
     */
    testl %eax, %eax                      // null check object/eax
    jz   .Lslow_unlock
.Lretry_unlock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%eax), %ecx  // ecx := lock word
    movl %fs:THREAD_ID_OFFSET, %edx       // edx := thread id
    test LITERAL(LOCK_WORD_STATE_MASK), %ecx
    jnz  .Lslow_unlock                    // lock word contains a monitor
    cmpw %cx, %dx                         // does the thread id match?
    jne  .Lslow_unlock
    movl %ecx, %edx                       // copy the lock word to detect new count of 0.
    andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %edx  // zero the gc bits.
    cmpl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx
    jae  .Lrecursive_thin_unlock
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %eax, %edx                       // edx: obj
    movl %ecx, %eax                       // eax: old lock word.
    andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED), %ecx  // ecx: new lock word zero except original rb bits.
#ifndef USE_READ_BARRIER
    movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
#else
    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed retry
#endif
    ret
.Lrecursive_thin_unlock:  // ecx: original lock word, eax: obj
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %eax, %edx                       // edx: obj
    movl %ecx, %eax                       // eax: old lock word.
    subl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // ecx: new lock word with decremented count.
#ifndef USE_READ_BARRIER
    mov  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)
#else
    lock cmpxchg  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edx)  // eax: old val, ecx: new val.
    jnz  .Lunlock_cmpxchg_fail            // cmpxchg failed retry
#endif
    ret
.Lunlock_cmpxchg_fail:  // edx: obj
    // CAS lost a race (e.g. a read-barrier-bit update); reload and retry.
    movl %edx, %eax                       // restore eax
    jmp  .Lretry_unlock
.Lslow_unlock:
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_unlock_object
   1408 
DEFINE_FUNCTION art_quick_unlock_object_no_inline
    /*
     * Unlocks the object in EAX by calling artUnlockObjectFromCode directly,
     * without the inline thin-lock fast path (identical to the slow path of
     * art_quick_unlock_object).
     */
    SETUP_SAVE_REFS_ONLY_FRAME  ebx, ebx  // save ref containing registers for GC
    // Outgoing argument set up
    subl LITERAL(8), %esp                 // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                              // pass object
    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
    addl LITERAL(16), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_unlock_object_no_inline
   1423 
DEFINE_FUNCTION art_quick_instance_of
    /*
     * instanceof check: forwards EAX and ECX to
     * artInstanceOfFromCode(Object* obj, Class* ref_klass) and returns its
     * result in EAX. The three 4-byte pushes keep ESP 16-byte aligned at the
     * call (CFA is ESP+4 on entry).
     */
    PUSH eax                              // alignment padding
    PUSH ecx                              // pass arg2 - obj->klass
    PUSH eax                              // pass arg1 - checked class
    call SYMBOL(artInstanceOfFromCode)    // (Object* obj, Class* ref_klass)
    addl LITERAL(12), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_instance_of
   1433 
DEFINE_FUNCTION art_quick_check_instance_of
    /*
     * checkcast: EAX = object, ECX = class to check against. Calls
     * artInstanceOfFromCode(obj, klass); returns normally if assignable,
     * otherwise throws ClassCastException via
     * artThrowClassCastExceptionForObject (never returns on that path).
     * The pushed arguments are deliberately left on the stack across the
     * assignability test so the throw path can recover EAX/ECX.
     */
    PUSH eax                              // alignment padding
    PUSH ecx                              // pass arg2 - checked class
    PUSH eax                              // pass arg1 - obj
    call SYMBOL(artInstanceOfFromCode)    // (Object* obj, Class* ref_klass)
    testl %eax, %eax
    jz .Lthrow_class_cast_exception       // jump forward if not assignable
    addl LITERAL(12), %esp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
    CFI_ADJUST_CFA_OFFSET(12)             // Reset unwind info so following code unwinds.

.Lthrow_class_cast_exception:
    POP eax                               // pop arguments
    POP ecx
    addl LITERAL(4), %esp
    CFI_ADJUST_CFA_OFFSET(-4)

    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                              // alignment padding
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                              // pass arg2
    PUSH eax                              // pass arg1
    call SYMBOL(artThrowClassCastExceptionForObject)  // (Object* src, Class* dest, Thread*)
    UNREACHABLE
END_FUNCTION art_quick_check_instance_of
   1462 
// Restore reg's value if reg is not the same as exclude_reg, otherwise just adjust stack.
// Either branch shrinks the stack by 4 bytes and keeps the CFI offset correct
// (POP adjusts CFA itself; the add is paired with an explicit adjustment).
MACRO2(POP_REG_NE, reg, exclude_reg)
    .ifc RAW_VAR(reg), RAW_VAR(exclude_reg)
      addl MACRO_LITERAL(4), %esp
      CFI_ADJUST_CFA_OFFSET(-4)
    .else
      POP RAW_VAR(reg)
    .endif
END_MACRO
   1472 
   1473     /*
   1474      * Macro to insert read barrier, only used in art_quick_aput_obj.
   1475      * obj_reg and dest_reg are registers, offset is a defined literal such as
   1476      * MIRROR_OBJECT_CLASS_OFFSET.
   1477      * pop_eax is a boolean flag, indicating if eax is popped after the call.
   1478      * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path.
   1479      */
   1480 MACRO4(READ_BARRIER, obj_reg, offset, dest_reg, pop_eax)
   1481 #ifdef USE_READ_BARRIER
   1482     PUSH eax                        // save registers used in art_quick_aput_obj
   1483     PUSH ebx
   1484     PUSH edx
   1485     PUSH ecx
   1486     // Outgoing argument set up
   1487     pushl MACRO_LITERAL((RAW_VAR(offset)))  // pass offset, double parentheses are necessary
   1488     CFI_ADJUST_CFA_OFFSET(4)
   1489     PUSH RAW_VAR(obj_reg)           // pass obj_reg
   1490     PUSH eax                        // pass ref, just pass eax for now since parameter ref is unused
   1491     call SYMBOL(artReadBarrierSlow) // artReadBarrierSlow(ref, obj_reg, offset)
   1492     // No need to unpoison return value in eax, artReadBarrierSlow() would do the unpoisoning.
   1493     .ifnc RAW_VAR(dest_reg), eax
   1494       movl %eax, REG_VAR(dest_reg)  // save loaded ref in dest_reg
   1495     .endif
   1496     addl MACRO_LITERAL(12), %esp    // pop arguments
   1497     CFI_ADJUST_CFA_OFFSET(-12)
   1498     POP_REG_NE ecx, RAW_VAR(dest_reg) // Restore args except dest_reg
   1499     POP_REG_NE edx, RAW_VAR(dest_reg)
   1500     POP_REG_NE ebx, RAW_VAR(dest_reg)
   1501     .ifc RAW_VAR(pop_eax), true
   1502       POP_REG_NE eax, RAW_VAR(dest_reg)
   1503     .endif
   1504 #else
   1505     movl RAW_VAR(offset)(REG_VAR(obj_reg)), REG_VAR(dest_reg)
   1506     UNPOISON_HEAP_REF RAW_VAR(dest_reg)
   1507 #endif  // USE_READ_BARRIER
   1508 END_MACRO
   1509 
DEFINE_FUNCTION art_quick_aput_obj
    /*
     * Object-array store with assignability check:
     * EAX = array, ECX = index, EDX = value (reference or null).
     * Stores null directly; otherwise compares the value's class against the
     * array's component type, calling artIsAssignableFromCode for non-trivial
     * cases, and throws ArrayStoreException on failure. After a successful
     * store of a non-null value the GC card for the array is dirtied.
     * NOTE(review): no bounds check is performed here — assumed done by the
     * caller before entering this stub; confirm against the compiler's
     * aput lowering.
     */
    test %edx, %edx              // store of null
    jz .Ldo_aput_null
    READ_BARRIER eax, MIRROR_OBJECT_CLASS_OFFSET, ebx, true
    READ_BARRIER ebx, MIRROR_CLASS_COMPONENT_TYPE_OFFSET, ebx, true
    // value's type == array's component type - trivial assignability
#if defined(USE_READ_BARRIER)
    READ_BARRIER edx, MIRROR_OBJECT_CLASS_OFFSET, eax, false
    cmpl %eax, %ebx
    POP eax                      // restore eax from the push in the beginning of READ_BARRIER macro
    // This asymmetric push/pop saves a push of eax and maintains stack alignment.
#elif defined(USE_HEAP_POISONING)
    PUSH eax                     // save eax
    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
    UNPOISON_HEAP_REF eax
    cmpl %eax, %ebx
    POP eax                      // restore eax
#else
    cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ebx
#endif
    jne .Lcheck_assignability
.Ldo_aput:
    POISON_HEAP_REF edx
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
    // Mark the GC card: store the low byte of the card-table base at
    // base + (array >> CARD_TABLE_CARD_SHIFT).
    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
    shrl LITERAL(CARD_TABLE_CARD_SHIFT), %eax
    movb %dl, (%edx, %eax)
    ret
.Ldo_aput_null:
    // Null store: no type check and no card mark needed.
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
    ret
.Lcheck_assignability:
    PUSH eax                      // save arguments
    PUSH ecx
    PUSH edx
#if defined(USE_READ_BARRIER)
    subl LITERAL(4), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    READ_BARRIER edx, MIRROR_OBJECT_CLASS_OFFSET, eax, true
    subl LITERAL(4), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                      // pass arg2 - type of the value to be stored
#elif defined(USE_HEAP_POISONING)
    subl LITERAL(8), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
    UNPOISON_HEAP_REF eax
    PUSH eax                      // pass arg2 - type of the value to be stored
#else
    subl LITERAL(8), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    pushl MIRROR_OBJECT_CLASS_OFFSET(%edx)  // pass arg2 - type of the value to be stored
    CFI_ADJUST_CFA_OFFSET(4)
#endif
    PUSH ebx                      // pass arg1 - component type of the array
    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
    addl LITERAL(16), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    testl %eax, %eax
    jz   .Lthrow_array_store_exception
    POP  edx
    POP  ecx
    POP  eax
    POISON_HEAP_REF edx
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)  // do the aput
    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
    shrl LITERAL(CARD_TABLE_CARD_SHIFT), %eax
    movb %dl, (%edx, %eax)
    ret
    CFI_ADJUST_CFA_OFFSET(12)     // 3 POP after the jz for unwinding.
.Lthrow_array_store_exception:
    POP  edx
    POP  ecx
    POP  eax
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx, ebx // save all registers as basis for long jump context
    // Outgoing argument set up
    PUSH eax                      // alignment padding
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH edx                      // pass arg2 - value
    PUSH eax                      // pass arg1 - array
    call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*)
    UNREACHABLE
END_FUNCTION art_quick_aput_obj
   1594 
DEFINE_FUNCTION art_quick_memcpy
    /*
     * Thin wrapper forwarding EAX (dst), ECX (src) and EDX (size) to libc
     * memcpy through the PLT. Clobbers EBX (used for the GOT pointer).
     */
    SETUP_GOT_NOSAVE ebx          // clobbers EBX
    PUSH edx                      // pass arg3
    PUSH ecx                      // pass arg2
    PUSH eax                      // pass arg1
    call PLT_SYMBOL(memcpy)       // (void*, const void*, size_t)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_memcpy
   1605 
DEFINE_FUNCTION art_quick_test_suspend
    /*
     * Suspend check entrypoint: saves every register (SaveEverything frame)
     * so the thread can be suspended/GC-scanned at this point, then calls
     * artTestSuspendFromCode(Thread*) and restores the full register state.
     */
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx, RUNTIME_SAVE_EVERYTHING_FOR_SUSPEND_CHECK_METHOD_OFFSET  // save everything for GC
    // Outgoing argument set up
    subl MACRO_LITERAL(12), %esp                      // push padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artTestSuspendFromCode)               // (Thread*)
    addl MACRO_LITERAL(16), %esp                      // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    RESTORE_SAVE_EVERYTHING_FRAME                     // restore frame up to return address
    ret                                               // return
END_FUNCTION art_quick_test_suspend
   1619 
DEFINE_FUNCTION art_quick_d2l
    /*
     * double-to-long conversion: spills the double argument from XMM0 to the
     * stack and calls art_d2l(jdouble); the 64-bit result comes back in
     * EDX:EAX per the ia32 cdecl convention.
     */
    subl LITERAL(12), %esp        // alignment padding, room for argument
    CFI_ADJUST_CFA_OFFSET(12)
    movsd %xmm0, 0(%esp)          // arg a
    call SYMBOL(art_d2l)          // (jdouble a)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_d2l
   1629 
DEFINE_FUNCTION art_quick_f2l
    /*
     * float-to-long conversion: spills the float argument from XMM0 to the
     * stack and calls art_f2l(jfloat); the 64-bit result comes back in
     * EDX:EAX per the ia32 cdecl convention.
     */
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    movss %xmm0, 0(%esp)          // arg a
    call SYMBOL(art_f2l)          // (jfloat a)
    addl LITERAL(12), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_f2l
   1639 
DEFINE_FUNCTION art_quick_ldiv
    /*
     * 64-bit signed division a / b via artLdiv.
     * In: ECX:EAX = a (hi:lo), EBX:EDX = b (hi:lo).
     * Out: EDX:EAX = quotient (ia32 cdecl 64-bit return).
     */
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    PUSH ebx                      // pass arg4 b.hi
    PUSH edx                      // pass arg3 b.lo
    PUSH ecx                      // pass arg2 a.hi
    PUSH eax                      // pass arg1 a.lo
    call SYMBOL(artLdiv)          // (jlong a, jlong b)
    addl LITERAL(28), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-28)
    ret
END_FUNCTION art_quick_ldiv
   1652 
DEFINE_FUNCTION art_quick_lmod
    /*
     * 64-bit signed remainder a % b via artLmod.
     * In: ECX:EAX = a (hi:lo), EBX:EDX = b (hi:lo).
     * Out: EDX:EAX = remainder (ia32 cdecl 64-bit return).
     */
    subl LITERAL(12), %esp        // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    PUSH ebx                      // pass arg4 b.hi
    PUSH edx                      // pass arg3 b.lo
    PUSH ecx                      // pass arg2 a.hi
    PUSH eax                      // pass arg1 a.lo
    call SYMBOL(artLmod)          // (jlong a, jlong b)
    addl LITERAL(28), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-28)
    ret
END_FUNCTION art_quick_lmod
   1665 
DEFINE_FUNCTION art_quick_lmul
    /*
     * 64-bit multiply using 32x32->64 schoolbook decomposition:
     * In: ECX:EAX = a (hi:lo), EBX:EDX = b (hi:lo).
     * Out: EDX:EAX = a * b (low 64 bits). Clobbers EBX, ECX.
     */
    imul %eax, %ebx               // ebx = a.lo(eax) * b.hi(ebx)
    imul %edx, %ecx               // ecx = b.lo(edx) * a.hi(ecx)
    mul  %edx                     // edx:eax = a.lo(eax) * b.lo(edx)
    add  %ebx, %ecx
    add  %ecx, %edx               // edx += (a.lo * b.hi) + (b.lo * a.hi)
    ret
END_FUNCTION art_quick_lmul
   1674 
DEFINE_FUNCTION art_quick_lshl
    // 64-bit left shift: result EDX:EAX = (ECX:EAX hi:lo) << EDX.
    // SHLD/SHL only use CL mod 32, so the explicit test of bit 5 handles
    // shift amounts >= 32 by moving the low word up and zeroing the low half.
    // ecx:eax << edx
    xchg %edx, %ecx
    shld %cl,%eax,%edx
    shl  %cl,%eax
    test LITERAL(32), %cl
    jz  1f
    mov %eax, %edx
    xor %eax, %eax
1:
    ret
END_FUNCTION art_quick_lshl
   1687 
DEFINE_FUNCTION art_quick_lshr
    // 64-bit arithmetic right shift: result EDX:EAX = (ECX:EAX hi:lo) >> EDX.
    // For shifts >= 32 (bit 5 of CL set) the high word moves down and the new
    // high word is filled with the sign (sar by 31).
    // ecx:eax >> edx
    xchg %edx, %ecx
    shrd %cl,%edx,%eax
    sar  %cl,%edx
    test LITERAL(32),%cl
    jz  1f
    mov %edx, %eax
    sar LITERAL(31), %edx
1:
    ret
END_FUNCTION art_quick_lshr
   1700 
DEFINE_FUNCTION art_quick_lushr
    // 64-bit logical right shift: result EDX:EAX = (ECX:EAX hi:lo) >>> EDX.
    // For shifts >= 32 (bit 5 of CL set) the high word moves down and the new
    // high word is zeroed.
    // ecx:eax >>> edx
    xchg %edx, %ecx
    shrd %cl,%edx,%eax
    shr  %cl,%edx
    test LITERAL(32),%cl
    jz  1f
    mov %edx, %eax
    xor %edx, %edx
1:
    ret
END_FUNCTION art_quick_lushr
   1713 
// Note: Functions `art{Get,Set}<Kind>{Static,Instance}FromCompiledCode` are
// defined with a macro in runtime/entrypoints/quick/quick_field_entrypoints.cc.

// Static field getters (one argument plus Thread*).
ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION

// Instance field getters (two arguments plus Thread*).
TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION

// Static field setters; EAX == 0 signals success.
TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCompiledCode, RETURN_IF_EAX_ZERO

// Three-argument setters (64-bit static, and instance setters which also
// carry the object); EAX == 0 signals success.
THREE_ARG_REF_DOWNCALL art_quick_set64_static, artSet64StaticFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCompiledCode, RETURN_IF_EAX_ZERO
   1743 
// Call artSet64InstanceFromCompiledCode with 4 word size arguments:
// EAX = field_idx, ECX = object, EDX:EBX = new_val (lo:hi).
DEFINE_FUNCTION art_quick_set64_instance
    // SETUP_SAVE_REFS_ONLY_FRAME uses EBX, so park the high half of new_val
    // in XMM0 across the frame setup and restore it afterwards.
    movd %ebx, %xmm0
    SETUP_SAVE_REFS_ONLY_FRAME ebx, ebx  // save ref containing registers for GC
    movd %xmm0, %ebx
    // Outgoing argument set up
    subl LITERAL(12), %esp         // alignment padding
    CFI_ADJUST_CFA_OFFSET(12)
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ebx                      // pass high half of new_val
    PUSH edx                      // pass low half of new_val
    PUSH ecx                      // pass object
    PUSH eax                      // pass field_idx
    call SYMBOL(artSet64InstanceFromCompiledCode)  // (field_idx, Object*, new_val, Thread*)
    addl LITERAL(32), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-32)
    RESTORE_SAVE_REFS_ONLY_FRAME  // restore frame up to return address
    RETURN_IF_EAX_ZERO            // return or deliver exception
END_FUNCTION art_quick_set64_instance
   1764 
DEFINE_FUNCTION art_quick_proxy_invoke_handler
    /*
     * Entry for proxy method invocation: EAX = proxy ArtMethod*,
     * ECX = receiver. Sets up a SaveRefsAndArgs frame and forwards to
     * artQuickProxyInvokeHandler(method, receiver, Thread*, SP). The 64-bit
     * result in EDX:EAX is also packed into XMM0 so either integer or FP
     * return conventions see the value.
     */
    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX
    PUSH esp                      // pass SP
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                      // pass receiver
    PUSH eax                      // pass proxy method
    call SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP)
    movd %eax, %xmm0              // place return value also into floating point return value
    movd %edx, %xmm1
    punpckldq %xmm1, %xmm0
    // Pop the 4 argument words plus the args portion of the frame, leaving a
    // SaveRefsOnly-shaped frame to restore.
    addl LITERAL(16 + FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_REFS_ONLY), %esp
    CFI_ADJUST_CFA_OFFSET(-(16 + FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_REFS_ONLY))
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
END_FUNCTION art_quick_proxy_invoke_handler
   1781 
   1782     /*
   1783      * Called to resolve an imt conflict.
   1784      * eax is the conflict ArtMethod.
   1785      * xmm7 is a hidden argument that holds the target interface method's dex method index.
   1786      *
   1787      * Note that this stub writes to eax.
   1788      * Because of lack of free registers, it also saves and restores edi.
   1789      */
   1790 DEFINE_FUNCTION art_quick_imt_conflict_trampoline
   1791     PUSH EDI
   1792     PUSH ESI
   1793     PUSH EDX
   1794     movl 16(%esp), %edi         // Load referrer.
   1795     // If the method is obsolete, just go through the dex cache miss slow path.
   1796     // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
   1797     testl LITERAL(ACC_OBSOLETE_METHOD), ART_METHOD_ACCESS_FLAGS_OFFSET(%edi)
   1798     jnz .Limt_conflict_trampoline_dex_cache_miss
   1799     movl ART_METHOD_DECLARING_CLASS_OFFSET(%edi), %edi // Load declaring class (no read barrier).
   1800     movl MIRROR_CLASS_DEX_CACHE_OFFSET(%edi), %edi     // Load the DexCache (without read barrier).
   1801     UNPOISON_HEAP_REF edi
   1802     movl MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET(%edi), %edi  // Load the resolved methods.
   1803     pushl ART_METHOD_JNI_OFFSET_32(%eax)  // Push ImtConflictTable.
   1804     CFI_ADJUST_CFA_OFFSET(4)
   1805     movd %xmm7, %eax            // Get target method index stored in xmm7.
   1806     movl %eax, %esi             // Remember method index in ESI.
   1807     andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax  // Calculate DexCache method slot index.
   1808     leal 0(%edi, %eax, 2 * __SIZEOF_POINTER__), %edi  // Load DexCache method slot address.
   1809     mov %ecx, %edx              // Make EDX:EAX == ECX:EBX so that LOCK CMPXCHG8B makes no changes.
   1810     mov %ebx, %eax              // (The actual value does not matter.)
   1811     lock cmpxchg8b (%edi)       // Relaxed atomic load EDX:EAX from the dex cache slot.
   1812     popl %edi                   // Pop ImtConflictTable.
   1813     CFI_ADJUST_CFA_OFFSET(-4)
   1814     cmp %edx, %esi              // Compare method index to see if we had a DexCache method hit.
   1815     jne .Limt_conflict_trampoline_dex_cache_miss
   1816 .Limt_table_iterate:
   1817     cmpl %eax, 0(%edi)
   1818     jne .Limt_table_next_entry
   1819     // We successfully hit an entry in the table. Load the target method
   1820     // and jump to it.
   1821     movl __SIZEOF_POINTER__(%edi), %eax
   1822     CFI_REMEMBER_STATE
   1823     POP EDX
   1824     POP ESI
   1825     POP EDI
   1826     jmp *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)
   1827     CFI_RESTORE_STATE
   1828 .Limt_table_next_entry:
   1829     // If the entry is null, the interface method is not in the ImtConflictTable.
   1830     cmpl LITERAL(0), 0(%edi)
   1831     jz .Lconflict_trampoline
   1832     // Iterate over the entries of the ImtConflictTable.
   1833     addl LITERAL(2 * __SIZEOF_POINTER__), %edi
   1834     jmp .Limt_table_iterate
   1835 .Lconflict_trampoline:
   1836     // Call the runtime stub to populate the ImtConflictTable and jump to the
   1837     // resolved method.
   1838     CFI_REMEMBER_STATE
   1839     POP EDX
   1840     POP ESI
   1841     POP EDI
   1842     INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
   1843     CFI_RESTORE_STATE
   1844 .Limt_conflict_trampoline_dex_cache_miss:
   1845     // We're not creating a proper runtime method frame here,
   1846     // artLookupResolvedMethod() is not allowed to walk the stack.
   1847 
   1848     // Save core register args; EDX is already saved.
   1849     PUSH ebx
   1850     PUSH ecx
   1851 
   1852     // Save FPR args.
   1853     subl MACRO_LITERAL(32), %esp
   1854     CFI_ADJUST_CFA_OFFSET(32)
   1855     movsd %xmm0, 0(%esp)
   1856     movsd %xmm1, 8(%esp)
   1857     movsd %xmm2, 16(%esp)
   1858     movsd %xmm3, 24(%esp)
   1859 
   1860     pushl 32+8+16(%esp)         // Pass referrer.
   1861     CFI_ADJUST_CFA_OFFSET(4)
   1862     pushl %esi                  // Pass method index.
   1863     CFI_ADJUST_CFA_OFFSET(4)
   1864     call SYMBOL(artLookupResolvedMethod)  // (uint32_t method_index, ArtMethod* referrer)
   1865     addl LITERAL(8), %esp       // Pop arguments.
   1866     CFI_ADJUST_CFA_OFFSET(-8)
   1867 
   1868     // Restore FPR args.
   1869     movsd 0(%esp), %xmm0
   1870     movsd 8(%esp), %xmm1
   1871     movsd 16(%esp), %xmm2
   1872     movsd 24(%esp), %xmm3
   1873     addl MACRO_LITERAL(32), %esp
   1874     CFI_ADJUST_CFA_OFFSET(-32)
   1875 
   1876     // Restore core register args.
   1877     POP ecx
   1878     POP ebx
   1879 
   1880     cmp LITERAL(0), %eax        // If the method wasn't resolved,
   1881     je .Lconflict_trampoline    //   skip the lookup and go to artInvokeInterfaceTrampoline().
   1882     jmp .Limt_table_iterate
   1883 END_FUNCTION art_quick_imt_conflict_trampoline
   1884 
    /*
     * Trampoline for methods whose code has not yet been resolved/linked.
     * Calls artQuickResolutionTrampoline(Method* called, receiver, Thread*, SP);
     * on success (non-null code pointer in EAX/EDI) restores the caller's
     * argument registers and jumps to the resolved code, otherwise delivers
     * the pending exception.
     */
DEFINE_FUNCTION art_quick_resolution_trampoline
    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx, ebx
    movl %esp, %edi
    PUSH EDI                      // pass SP. do not just PUSH ESP; that messes up unwinding
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                      // pass receiver
    PUSH eax                      // pass method
    call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
    movl %eax, %edi               // remember code pointer in EDI
    addl LITERAL(16), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)
    test %eax, %eax               // if code pointer is null goto deliver pending exception
    jz 1f
    RESTORE_SAVE_REFS_AND_ARGS_FRAME_AND_JUMP
1:
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_resolution_trampoline
   1904 
    /*
     * Generic JNI trampoline: bridges a call into a native method that has no
     * compiled JNI stub. EAX holds the ArtMethod* on entry. The C helper
     * artQuickGenericJniTrampoline marshals the arguments into the scratch
     * area below the callee-save frame and returns the native code pointer;
     * artQuickGenericJniEndTrampoline post-processes the native result.
     * While the scratch area is live, the CFA is tracked via EBP.
     */
DEFINE_FUNCTION art_quick_generic_jni_trampoline
    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX
    movl %esp, %ebp               // save SP at callee-save frame
    CFI_DEF_CFA_REGISTER(ebp)     // CFA now tracked via EBP; ESP moves freely below.
    // Reserve a large scratch area below the frame (5120 bytes); presumably
    // used by the C helper for the handle scope and outgoing native args —
    // TODO(review): confirm against artQuickGenericJniTrampoline.
    subl LITERAL(5120), %esp
    // prepare for artQuickGenericJniTrampoline call
    // (Thread*,  SP)
    //  (esp)    4(esp)   <= C calling convention
    //  fs:...    ebp     <= where they are

    subl LITERAL(8), %esp         // Padding for 16B alignment.
    pushl %ebp                    // Pass SP (to ArtMethod).
    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)

    // The C call will have registered the complete save-frame on success.
    // The result of the call is:
    // eax: pointer to native code, 0 on error.
    // edx: pointer to the bottom of the used area of the alloca, can restore stack till there.

    // Check for error = 0.
    test %eax, %eax
    jz .Lexception_in_native

    // Release part of the alloca.
    movl %edx, %esp

    // On x86 there are no registers passed, so nothing to pop here.
    // Native call.
    call *%eax

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*, result, result_f)
    //  (esp)    4(esp)  12(esp)    <= C calling convention
    //  fs:...  eax:edx   fp0      <= where they are

    subl LITERAL(20), %esp        // Padding & pass float result.
    fstpl (%esp)                  // Spill x87 st(0) (native FP return) as a double.
    pushl %edx                    // Pass int result.
    pushl %eax
    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
    call SYMBOL(artQuickGenericJniEndTrampoline)

    // Pending exceptions possible.
    mov %fs:THREAD_EXCEPTION_OFFSET, %ebx
    testl %ebx, %ebx
    jnz .Lexception_in_native

    // Tear down the alloca.
    movl %ebp, %esp
    CFI_DEF_CFA_REGISTER(esp)     // CFA is ESP-relative again.


    // Tear down the callee-save frame.
    // Remove space for FPR args and EAX
    addl LITERAL(4 + 4 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(-(4 + 4 * 8))

    POP ecx
    addl LITERAL(4), %esp         // Avoid edx, as it may be part of the result.
    CFI_ADJUST_CFA_OFFSET(-4)
    POP ebx
    POP ebp  // Restore callee saves
    POP esi
    POP edi
    // Quick expects the return value to be in xmm0.
    movd %eax, %xmm0
    movd %edx, %xmm1
    punpckldq %xmm1, %xmm0
    ret
.Lexception_in_native:
    // Rewind ESP to the top quick frame recorded in the Thread, stripping the
    // GenericJNI tag bit from the stored pointer first.
    pushl %fs:THREAD_TOP_QUICK_FRAME_OFFSET
    addl LITERAL(-1), (%esp)  // Remove the GenericJNI tag.
    movl (%esp), %esp
    // Do a call to push a new save-all frame required by the runtime.
    call .Lexception_call
.Lexception_call:
    DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_generic_jni_trampoline
   1985 
    /*
     * Bridge from quick code into the interpreter. EAX holds the ArtMethod* on
     * entry. Calls artQuickToInterpreterBridge(method, Thread*, SP) and returns
     * its 64-bit result in EDX:EAX as well as XMM0.
     */
DEFINE_FUNCTION art_quick_to_interpreter_bridge
    SETUP_SAVE_REFS_AND_ARGS_FRAME  ebx, ebx  // save frame
    mov %esp, %edx                // remember SP
    PUSH eax                      // alignment padding
    PUSH edx                      // pass SP
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH eax                      // pass  method
    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
    addl LITERAL(16), %esp        // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)

    // Return eax:edx in xmm0 also.
    movd %eax, %xmm0
    movd %edx, %xmm1
    punpckldq %xmm1, %xmm0

    // Manually unwind the argument-save part of the frame (4 FPR slots of 8
    // bytes plus 4 GPR slots of 4 bytes = 48) without clobbering EAX/EDX.
    addl LITERAL(48), %esp        // Remove FPRs and EAX, ECX, EDX, EBX.
    CFI_ADJUST_CFA_OFFSET(-48)

    POP ebp                       // Restore callee saves
    POP esi
    POP edi

    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
END_FUNCTION art_quick_to_interpreter_bridge
   2012 
   2013     /*
   2014      * Called by managed code, saves callee saves and then calls artInvokeObsoleteMethod
   2015      */
   2016 ONE_ARG_RUNTIME_EXCEPTION art_invoke_obsolete_method_stub, artInvokeObsoleteMethod
   2017 
   2018     /*
   2019      * Routine that intercepts method calls and returns.
   2020      */
   2021 DEFINE_FUNCTION art_quick_instrumentation_entry
   2022     SETUP_SAVE_REFS_AND_ARGS_FRAME ebx, edx
   2023     PUSH eax                      // Save eax which will be clobbered by the callee-save method.
   2024     subl LITERAL(16), %esp        // Align stack (12 bytes) and reserve space for the SP argument
   2025     CFI_ADJUST_CFA_OFFSET(16)     // (4 bytes). We lack the scratch registers to calculate the SP
   2026                                   // right now, so we will just fill it in later.
   2027     pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
   2028     CFI_ADJUST_CFA_OFFSET(4)
   2029     PUSH ecx                      // Pass receiver.
   2030     PUSH eax                      // Pass Method*.
   2031     leal 32(%esp), %eax           // Put original SP into eax
   2032     movl %eax, 12(%esp)           // set SP
   2033     call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, SP)
   2034 
   2035     addl LITERAL(28), %esp        // Pop arguments upto saved Method*.
   2036     CFI_ADJUST_CFA_OFFSET(-28)
   2037 
   2038     testl %eax, %eax
   2039     jz 1f                         // Test for null return (indicating exception) and handle it.
   2040 
   2041     movl 60(%esp), %edi           // Restore edi.
   2042     movl %eax, 60(%esp)           // Place code* over edi, just under return pc.
   2043     movl SYMBOL(art_quick_instrumentation_exit)@GOT(%ebx), %ebx
   2044     // Place instrumentation exit as return pc. ebx holds the GOT computed on entry.
   2045     movl %ebx, 64(%esp)
   2046     movl 0(%esp), %eax           // Restore eax.
   2047     // Restore FPRs (extra 4 bytes of offset due to EAX push at top).
   2048     movsd 8(%esp), %xmm0
   2049     movsd 16(%esp), %xmm1
   2050     movsd 24(%esp), %xmm2
   2051     movsd 32(%esp), %xmm3
   2052 
   2053     // Restore GPRs.
   2054     movl 40(%esp), %ecx           // Restore ecx.
   2055     movl 44(%esp), %edx           // Restore edx.
   2056     movl 48(%esp), %ebx           // Restore ebx.
   2057     movl 52(%esp), %ebp           // Restore ebp.
   2058     movl 56(%esp), %esi           // Restore esi.
   2059     addl LITERAL(60), %esp        // Wind stack back upto code*.
   2060     CFI_ADJUST_CFA_OFFSET(-60)
   2061     ret                           // Call method (and pop).
   2062 1:
   2063     // Make caller handle exception
   2064     addl LITERAL(4), %esp
   2065     CFI_ADJUST_CFA_OFFSET(-4)
   2066     RESTORE_SAVE_REFS_AND_ARGS_FRAME
   2067     DELIVER_PENDING_EXCEPTION
   2068 END_FUNCTION art_quick_instrumentation_entry
   2069 
    /*
     * Exit hook: reached by "returning" from an instrumented method (see
     * art_quick_instrumentation_entry, which planted this stub as the return
     * pc). Calls artInstrumentationMethodExitFromCode(Thread*, SP, gpr_result*,
     * fpr_result*); the returned EAX is the real return pc for a normal return
     * and EDX, when non-zero, is the deoptimization entry pc.
     */
DEFINE_FUNCTION_CUSTOM_CFA art_quick_instrumentation_exit, 0
    pushl LITERAL(0)              // Push a fake return PC as there will be none on the stack.
    CFI_ADJUST_CFA_OFFSET(4)
    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx

    movl %esp, %ecx               // Remember SP
    subl LITERAL(8), %esp         // Align stack.
    CFI_ADJUST_CFA_OFFSET(8)
    PUSH edx                      // Save gpr return value. edx and eax need to be together,
                                  // which isn't the case in kSaveEverything frame.
    PUSH eax
    leal 32(%esp), %eax           // Get pointer to fpr_result, in kSaveEverything frame
    movl %esp, %edx               // Get pointer to gpr_result
    PUSH eax                      // Pass fpr_result
    PUSH edx                      // Pass gpr_result
    PUSH ecx                      // Pass SP
    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current.
    CFI_ADJUST_CFA_OFFSET(4)

    call SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_result*, fpr_result*)
    // Return result could have been changed if it's a reference.
    // Copy the (possibly updated) low result word back into the save-everything
    // frame's EAX slot (80 into the frame, +32 for our local pushes) so
    // RESTORE_SAVE_EVERYTHING_FRAME reloads it — offsets per frame layout.
    movl 16(%esp), %ecx
    movl %ecx, (80+32)(%esp)
    addl LITERAL(32), %esp        // Pop arguments and grp_result.
    CFI_ADJUST_CFA_OFFSET(-32)

    testl %eax, %eax              // Check if we returned error.
    jz .Ldo_deliver_instrumentation_exception
    testl %edx, %edx              // Non-zero EDX means deoptimization requested.
    jnz .Ldeoptimize
    // Normal return.
    movl %eax, FRAME_SIZE_SAVE_EVERYTHING-4(%esp)   // Set return pc.
    RESTORE_SAVE_EVERYTHING_FRAME
    ret
.Ldeoptimize:
    mov %edx, (FRAME_SIZE_SAVE_EVERYTHING-4)(%esp)  // Set return pc.
    RESTORE_SAVE_EVERYTHING_FRAME
    jmp SYMBOL(art_quick_deoptimize)
.Ldo_deliver_instrumentation_exception:
    DELIVER_PENDING_EXCEPTION_FRAME_READY
END_FUNCTION art_quick_instrumentation_exit
   2111 
   2112     /*
   2113      * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
   2114      * will long jump to the upcall with a special exception of -1.
   2115      */
   2116 DEFINE_FUNCTION art_quick_deoptimize
   2117     SETUP_SAVE_EVERYTHING_FRAME ebx, ebx
   2118     subl LITERAL(12), %esp        // Align stack.
   2119     CFI_ADJUST_CFA_OFFSET(12)
   2120     pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
   2121     CFI_ADJUST_CFA_OFFSET(4)
   2122     call SYMBOL(artDeoptimize)    // (Thread*)
   2123     UNREACHABLE
   2124 END_FUNCTION art_quick_deoptimize
   2125 
   2126     /*
   2127      * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
   2128      * will long jump to the interpreter bridge.
   2129      */
   2130 DEFINE_FUNCTION art_quick_deoptimize_from_compiled_code
   2131     SETUP_SAVE_EVERYTHING_FRAME ebx, ebx
   2132     subl LITERAL(8), %esp                      // Align stack.
   2133     CFI_ADJUST_CFA_OFFSET(8)
   2134     pushl %fs:THREAD_SELF_OFFSET                // Pass Thread::Current().
   2135     CFI_ADJUST_CFA_OFFSET(4)
   2136     PUSH eax
   2137     call SYMBOL(artDeoptimizeFromCompiledCode)  // (DeoptimizationKind, Thread*)
   2138     UNREACHABLE
   2139 END_FUNCTION art_quick_deoptimize_from_compiled_code
   2140 
   2141     /*
   2142      * String's compareTo.
   2143      *
   2144      * On entry:
   2145      *    eax:   this string object (known non-null)
   2146      *    ecx:   comp string object (known non-null)
   2147      */
   2148 DEFINE_FUNCTION art_quick_string_compareto
   2149     PUSH esi                      // push callee save reg
   2150     PUSH edi                      // push callee save reg
   2151     mov MIRROR_STRING_COUNT_OFFSET(%eax), %edx
   2152     mov MIRROR_STRING_COUNT_OFFSET(%ecx), %ebx
   2153     lea MIRROR_STRING_VALUE_OFFSET(%eax), %esi
   2154     lea MIRROR_STRING_VALUE_OFFSET(%ecx), %edi
   2155 #if (STRING_COMPRESSION_FEATURE)
   2156     /* Differ cases */
   2157     shrl    LITERAL(1), %edx
   2158     jnc     .Lstring_compareto_this_is_compressed
   2159     shrl    LITERAL(1), %ebx
   2160     jnc     .Lstring_compareto_that_is_compressed
   2161     jmp     .Lstring_compareto_both_not_compressed
   2162 .Lstring_compareto_this_is_compressed:
   2163     shrl    LITERAL(1), %ebx
   2164     jnc     .Lstring_compareto_both_compressed
   2165     /* If (this->IsCompressed() && that->IsCompressed() == false) */
   2166     mov     %edx, %eax
   2167     subl    %ebx, %eax
   2168     mov     %edx, %ecx
   2169     cmovg   %ebx, %ecx
   2170     /* Going into loop to compare each character */
   2171     jecxz   .Lstring_compareto_keep_length            // check loop counter (if 0, don't compare)
   2172 .Lstring_compareto_loop_comparison_this_compressed:
   2173     movzbl  (%esi), %edx                              // move *(this_cur_char) byte to long
   2174     movzwl  (%edi), %ebx                              // move *(that_cur_char) word to long
   2175     addl    LITERAL(1), %esi                          // ++this_cur_char (8-bit)
   2176     addl    LITERAL(2), %edi                          // ++that_cur_char (16-bit)
   2177     subl    %ebx, %edx
   2178     loope   .Lstring_compareto_loop_comparison_this_compressed
   2179     cmovne  %edx, %eax                        // return eax = *(this_cur_char) - *(that_cur_char)
   2180     jmp     .Lstring_compareto_return
   2181 .Lstring_compareto_that_is_compressed:
   2182     mov     %edx, %eax
   2183     subl    %ebx, %eax
   2184     mov     %edx, %ecx
   2185     cmovg   %ebx, %ecx
   2186     /* If (this->IsCompressed() == false && that->IsCompressed()) */
   2187     jecxz   .Lstring_compareto_keep_length            // check loop counter, if 0, don't compare
   2188 .Lstring_compareto_loop_comparison_that_compressed:
   2189     movzwl  (%esi), %edx                              // move *(this_cur_char) word to long
   2190     movzbl  (%edi), %ebx                              // move *(that_cur_char) byte to long
   2191     addl    LITERAL(2), %esi                          // ++this_cur_char (16-bit)
   2192     addl    LITERAL(1), %edi                          // ++that_cur_char (8-bit)
   2193     subl    %ebx, %edx
   2194     loope   .Lstring_compareto_loop_comparison_that_compressed
   2195     cmovne  %edx, %eax
   2196     jmp     .Lstring_compareto_return         // return eax = *(this_cur_char) - *(that_cur_char)
   2197 .Lstring_compareto_both_compressed:
   2198     /* Calculate min length and count diff */
   2199     mov     %edx, %ecx
   2200     mov     %edx, %eax
   2201     subl    %ebx, %eax
   2202     cmovg   %ebx, %ecx
   2203     jecxz   .Lstring_compareto_keep_length
   2204     repe    cmpsb
   2205     je      .Lstring_compareto_keep_length
   2206     movzbl  -1(%esi), %eax        // get last compared char from this string (8-bit)
   2207     movzbl  -1(%edi), %ecx        // get last compared char from comp string (8-bit)
   2208     jmp     .Lstring_compareto_count_difference
   2209 #endif // STRING_COMPRESSION_FEATURE
   2210 .Lstring_compareto_both_not_compressed:
   2211     /* Calculate min length and count diff */
   2212     mov     %edx, %ecx
   2213     mov     %edx, %eax
   2214     subl    %ebx, %eax
   2215     cmovg   %ebx, %ecx
   2216     /*
   2217      * At this point we have:
   2218      *   eax: value to return if first part of strings are equal
   2219      *   ecx: minimum among the lengths of the two strings
   2220      *   esi: pointer to this string data
   2221      *   edi: pointer to comp string data
   2222      */
   2223     jecxz .Lstring_compareto_keep_length
   2224     repe  cmpsw                   // find nonmatching chars in [%esi] and [%edi], up to length %ecx
   2225     je    .Lstring_compareto_keep_length
   2226     movzwl  -2(%esi), %eax        // get last compared char from this string (16-bit)
   2227     movzwl  -2(%edi), %ecx        // get last compared char from comp string (16-bit)
   2228 .Lstring_compareto_count_difference:
   2229     subl    %ecx, %eax
   2230 .Lstring_compareto_keep_length:
   2231 .Lstring_compareto_return:
   2232     POP edi                       // pop callee save reg
   2233     POP esi                       // pop callee save reg
   2234     ret
   2235 END_FUNCTION art_quick_string_compareto
   2236 
// Create a function `name` calling the ReadBarrier::Mark routine,
// getting its argument and returning its result through register
// `reg`, saving and restoring all caller-save registers.
//
// If `reg` is different from `eax`, the generated function follows a
// non-standard runtime calling convention:
// - register `reg` is used to pass the (sole) argument of this function
//   (instead of EAX);
// - register `reg` is used to return the result of this function
//   (instead of EAX);
// - EAX is treated like a normal (non-argument) caller-save register;
// - everything else is the same as in the standard runtime calling
//   convention (e.g. standard callee-save registers are preserved).
//
// Fast paths handled entirely in assembly: null reference, an object whose
// lock word already has the mark bit set, and a lock word holding a
// forwarding address. Only otherwise is artReadBarrierMark(obj) called.
MACRO2(READ_BARRIER_MARK_REG, name, reg)
    DEFINE_FUNCTION VAR(name)
    // Null check so that we can load the lock word.
    test REG_VAR(reg), REG_VAR(reg)
    jz .Lret_rb_\name
.Lnot_null_\name:
    // Check the mark bit, if it is 1 return.
    testl LITERAL(LOCK_WORD_MARK_BIT_MASK_SHIFTED), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(reg))
    jz .Lslow_rb_\name
    ret
.Lslow_rb_\name:
    PUSH eax
    mov MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(reg)), %eax
    add LITERAL(LOCK_WORD_STATE_FORWARDING_ADDRESS_OVERFLOW), %eax
    // Jump if overflow, the only case where it overflows should be the forwarding address one.
    // Taken ~25% of the time.
    jnae .Lret_forwarding_address\name

    // Save all potentially live caller-save core registers.
    mov 0(%esp), %eax             // Reload original EAX (clobbered by the lock-word test above).
    PUSH ecx
    PUSH edx
    PUSH ebx
    // 8-byte align the stack to improve (8-byte) XMM register saving and restoring.
    // and create space for caller-save floating-point registers.
    subl MACRO_LITERAL(4 + 8 * 8), %esp
    CFI_ADJUST_CFA_OFFSET(4 + 8 * 8)
    // Save all potentially live caller-save floating-point registers.
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)
    movsd %xmm4, 32(%esp)
    movsd %xmm5, 40(%esp)
    movsd %xmm6, 48(%esp)
    movsd %xmm7, 56(%esp)

    subl LITERAL(4), %esp            // alignment padding
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH RAW_VAR(reg)                // pass arg1 - obj from `reg`
    call SYMBOL(artReadBarrierMark)  // artReadBarrierMark(obj)
    .ifnc RAW_VAR(reg), eax
      movl %eax, REG_VAR(reg)        // return result into `reg`
    .endif
    addl LITERAL(8), %esp            // pop argument and remove padding
    CFI_ADJUST_CFA_OFFSET(-8)

    // Restore floating-point registers.
    movsd 0(%esp), %xmm0
    movsd 8(%esp), %xmm1
    movsd 16(%esp), %xmm2
    movsd 24(%esp), %xmm3
    movsd 32(%esp), %xmm4
    movsd 40(%esp), %xmm5
    movsd 48(%esp), %xmm6
    movsd 56(%esp), %xmm7
    // Remove floating-point registers and padding.
    addl MACRO_LITERAL(8 * 8 + 4), %esp
    CFI_ADJUST_CFA_OFFSET(-(8 * 8 + 4))
    // Restore core regs, except `reg`, as it is used to return the
    // result of this function (simply remove it from the stack instead).
    POP_REG_NE ebx, RAW_VAR(reg)
    POP_REG_NE edx, RAW_VAR(reg)
    POP_REG_NE ecx, RAW_VAR(reg)
    POP_REG_NE eax, RAW_VAR(reg)
.Lret_rb_\name:
    ret
.Lret_forwarding_address\name:
    // The overflow cleared the top bits.
    sall LITERAL(LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT), %eax
    mov %eax, REG_VAR(reg)           // Return the decoded forwarding address in `reg`.
    POP_REG_NE eax, RAW_VAR(reg)
    ret
    END_FUNCTION VAR(name)
END_MACRO
   2325 
// Instantiate one mark stub per general-purpose register that can carry a
// reference; the register number is encoded in the stub name.
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg00, eax
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg01, ecx
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg02, edx
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg03, ebx
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg05, ebp
// Note: There is no art_quick_read_barrier_mark_reg04, as register 4 (ESP)
// cannot be used to pass arguments.
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg06, esi
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg07, edi
   2335 
    /*
     * Read barrier slow path: forwards (ref in EAX, obj in ECX, offset in EDX)
     * to artReadBarrierSlow(ref, obj, offset); the result comes back in EAX.
     */
DEFINE_FUNCTION art_quick_read_barrier_slow
    PUSH edx                         // pass arg3 - offset
    PUSH ecx                         // pass arg2 - obj
    PUSH eax                         // pass arg1 - ref
    call SYMBOL(artReadBarrierSlow)  // artReadBarrierSlow(ref, obj, offset)
    addl LITERAL(12), %esp           // pop arguments
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_read_barrier_slow
   2345 
    /*
     * GC-root read barrier slow path: forwards the root (in EAX) to
     * artReadBarrierForRootSlow(root); the result comes back in EAX.
     */
DEFINE_FUNCTION art_quick_read_barrier_for_root_slow
    subl LITERAL(8), %esp                   // alignment padding
    CFI_ADJUST_CFA_OFFSET(8)
    PUSH eax                                // pass arg1 - root
    call SYMBOL(artReadBarrierForRootSlow)  // artReadBarrierForRootSlow(root)
    addl LITERAL(12), %esp                  // pop argument and remove padding
    CFI_ADJUST_CFA_OFFSET(-12)
    ret
END_FUNCTION art_quick_read_barrier_for_root_slow
   2355 
   2356   /*
   2357      * On stack replacement stub.
   2358      * On entry:
   2359      *   [sp] = return address
   2360      *   [sp + 4] = stack to copy
   2361      *   [sp + 8] = size of stack
   2362      *   [sp + 12] = pc to call
   2363      *   [sp + 16] = JValue* result
   2364      *   [sp + 20] = shorty
   2365      *   [sp + 24] = thread
   2366      */
   2367 DEFINE_FUNCTION art_quick_osr_stub
   2368     // Save native callee saves.
   2369     PUSH ebp
   2370     PUSH ebx
   2371     PUSH esi
   2372     PUSH edi
   2373     SAVE_SIZE=20                   // 4 registers and the return address
   2374     mov 4+16(%esp), %esi           // ESI = argument array
   2375     mov 8+16(%esp), %ecx           // ECX = size of args
   2376     mov 12+16(%esp), %ebx          // EBX = pc to call
   2377     mov %esp, %ebp                 // Save stack pointer
   2378     CFI_DEF_CFA(ebp, SAVE_SIZE)    // CFA = ebp + SAVE_SIZE
   2379     CFI_REMEMBER_STATE
   2380     andl LITERAL(0xFFFFFFF0), %esp // Align stack
   2381     pushl %ebp                     // Save old stack pointer
   2382     subl LITERAL(12), %esp         // Align stack
   2383     movl LITERAL(0), (%esp)        // Store null for ArtMethod* slot
   2384     // ebp isn't properly spilled in the osr method, so we need use DWARF expression.
   2385     // NB: the CFI must be before the call since this is the address gdb will lookup.
   2386     // NB: gdb expects that cfa_expression returns the CFA value (not address to it).
   2387     CFI_ESCAPE(                    /* cfa = [sp + 12] + SAVE_SIZE */ \
   2388       0x0f, 6,                     /* DW_CFA_def_cfa_expression(len) */ \
   2389       0x92, 4, 12,                 /* DW_OP_bregx(reg,offset) */ \
   2390       0x06,                        /* DW_OP_deref */ \
   2391       0x23, SAVE_SIZE)             /* DW_OP_plus_uconst(val) */
   2392     call .Losr_entry
   2393     mov 12(%esp), %esp             // Restore stack pointer.
   2394     CFI_DEF_CFA(esp, SAVE_SIZE)    // CFA = esp + SAVE_SIZE
   2395 
   2396     // Restore callee saves.
   2397     POP edi
   2398     POP esi
   2399     POP ebx
   2400     POP ebp
   2401     mov 16(%esp), %ecx            // Get JValue result
   2402     mov %eax, (%ecx)              // Store the result assuming it is a long, int or Object*
   2403     mov %edx, 4(%ecx)             // Store the other half of the result
   2404     mov 20(%esp), %edx            // Get the shorty
   2405     cmpb LITERAL(68), (%edx)      // Test if result type char == 'D'
   2406     je .Losr_return_double_quick
   2407     cmpb LITERAL(70), (%edx)      // Test if result type char == 'F'
   2408     je .Losr_return_float_quick
   2409     ret
   2410 .Losr_return_double_quick:
   2411     movsd %xmm0, (%ecx)           // Store the floating point result
   2412     ret
   2413 .Losr_return_float_quick:
   2414     movss %xmm0, (%ecx)           // Store the floating point result
   2415     ret
   2416 .Losr_entry:
   2417     CFI_RESTORE_STATE
   2418     CFI_DEF_CFA(ebp, SAVE_SIZE)   // CFA = ebp + SAVE_SIZE
   2419     subl LITERAL(4), %ecx         // Given stack size contains pushed frame pointer, substract it.
   2420     subl %ecx, %esp
   2421     mov %esp, %edi                // EDI = beginning of stack
   2422     rep movsb                     // while (ecx--) { *edi++ = *esi++ }
   2423     jmp *%ebx
   2424 END_FUNCTION art_quick_osr_stub
   2425 
    /*
     * art_quick_invoke_polymorphic
     *
     * Quick entrypoint for invoke-polymorphic (MethodHandle) calls. Calls into
     * the runtime (artInvokePolymorphic) with a stack-allocated JValue to
     * receive the result, then dispatches on the shorty return-type character
     * (returned in EAX) to move the result into the location the managed ABI
     * expects: EAX for int-like/reference results, EAX:EDX for long, or the
     * XMM0 slot of the callee-save frame for float/double.
     * On entry ECX holds the receiver (the method handle).
     */
DEFINE_FUNCTION art_quick_invoke_polymorphic
    SETUP_SAVE_REFS_AND_ARGS_FRAME  ebx, ebx       // Save frame.
    mov %esp, %edx                                 // Remember SP.
    subl LITERAL(16), %esp                         // Make space for JValue result.
    CFI_ADJUST_CFA_OFFSET(16)
    movl LITERAL(0), (%esp)                        // Initialize result to zero.
    movl LITERAL(0), 4(%esp)
    mov %esp, %eax                                 // Store pointer to JValue result in eax.
    PUSH edx                                       // pass SP
    pushl %fs:THREAD_SELF_OFFSET                   // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH ecx                                       // pass receiver (method handle)
    PUSH eax                                       // pass JResult
    call SYMBOL(artInvokePolymorphic)              // artInvokePolymorphic(result, receiver, Thread*, SP)
                                                   // EAX = shorty return-type character.
    subl LITERAL('A'), %eax                        // Eliminate out of bounds options
    cmpb LITERAL('Z' - 'A'), %al
    ja .Lcleanup_and_return                        // Outside 'A'..'Z': nothing to store.
    movzbl %al, %eax                               // EAX = table index (type char - 'A').
    call .Lput_eip_in_ecx                          // PIC trick: ECX = address of .Lbranch_start.
.Lbranch_start:
    movl %ecx, %edx
    add $(.Lhandler_table - .Lbranch_start), %edx  // Make EDX point to handler_table.
    leal (%edx, %eax, 2), %eax                     // Calculate address of entry in table (2 bytes per entry).
    movzwl (%eax), %eax                            // Lookup relative branch in table.
    addl %ecx, %eax                                // Add EIP relative offset.
    jmp *%eax                                      // Branch to handler.

    // Handlers for different return types.  The JValue written by the runtime
    // is at 16(%esp): the four 4-byte arguments pushed above still sit on top.
.Lstore_boolean_result:
    movzbl 16(%esp), %eax                          // Copy boolean result to the accumulator.
    jmp .Lcleanup_and_return
.Lstore_char_result:
    movzwl 16(%esp), %eax                          // Copy char result to the accumulator.
    jmp .Lcleanup_and_return
.Lstore_float_result:
    movd 16(%esp), %xmm0                           // Copy float result to the context restored by
    movd %xmm0, 36(%esp)                           // RESTORE_SAVE_REFS_AND_ARGS_FRAME.
    jmp .Lcleanup_and_return
.Lstore_double_result:
    movsd 16(%esp), %xmm0                          // Copy double result to the context restored by
    movsd %xmm0, 36(%esp)                          // RESTORE_SAVE_REFS_AND_ARGS_FRAME.
    jmp .Lcleanup_and_return
.Lstore_long_result:
    movl 20(%esp), %edx                            // Copy upper-word of result to the context restored by
    movl %edx, 72(%esp)                            // RESTORE_SAVE_REFS_AND_ARGS_FRAME.
    // Fall-through for lower bits.
.Lstore_int_result:
    movl 16(%esp), %eax                            // Copy int result to the accumulator.
    // Fall-through to clean up and return.
.Lcleanup_and_return:
    addl LITERAL(32), %esp                         // Pop arguments and stack allocated JValue result.
    CFI_ADJUST_CFA_OFFSET(-32)
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    RETURN_OR_DELIVER_PENDING_EXCEPTION

.Lput_eip_in_ecx:                                  // Internal function that puts address of
    movl 0(%esp), %ecx                             // next instruction into ECX when CALL
    ret                                            // (i.e. reads its own return address).

    // Handler table to handlers for given type.  Each entry is a 16-bit
    // offset of the handler relative to .Lbranch_start, indexed by
    // (shorty type char - 'A').
.Lhandler_table:
MACRO1(HANDLER_TABLE_ENTRY, handler_label)
    // NB some tools require 16-bits for relocations. Shouldn't need adjusting.
    .word RAW_VAR(handler_label) - .Lbranch_start
END_MACRO
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // A (not a valid shorty type)
    HANDLER_TABLE_ENTRY(.Lstore_int_result)        // B (byte)
    HANDLER_TABLE_ENTRY(.Lstore_char_result)       // C (char)
    HANDLER_TABLE_ENTRY(.Lstore_double_result)     // D (double)
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // E
    HANDLER_TABLE_ENTRY(.Lstore_float_result)      // F (float)
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // G
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // H
    HANDLER_TABLE_ENTRY(.Lstore_int_result)        // I (int)
    HANDLER_TABLE_ENTRY(.Lstore_long_result)       // J (long)
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // K
    HANDLER_TABLE_ENTRY(.Lstore_int_result)        // L (object)
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // M
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // N
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // O
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // P
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // Q
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // R
    HANDLER_TABLE_ENTRY(.Lstore_int_result)        // S (short)
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // T
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // U
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // V (void)
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // W
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // X
    HANDLER_TABLE_ENTRY(.Lcleanup_and_return)      // Y
    HANDLER_TABLE_ENTRY(.Lstore_boolean_result)    // Z (boolean)

END_FUNCTION art_quick_invoke_polymorphic
   2519 
// Wrap ExecuteSwitchImpl in an assembly method which specifies the DEX PC
// for unwinding: the DEX PC is held in EBX (a callee-save register) across
// the call and advertised to the unwinder via CFI, so stack traces through
// the interpreter can report the current bytecode address.
//  Argument 0: ESP+4: The context pointer for ExecuteSwitchImpl.
//  Argument 1: ESP+8: Pointer to the templated ExecuteSwitchImpl to call.
//  Argument 2: ESP+12: The value of DEX PC (memory address of the methods bytecode).
DEFINE_FUNCTION ExecuteSwitchImplAsm
    PUSH ebx                 // Spill EBX; Increments ESP, so arg0 is at ESP+8 now.
    mov 12(%esp), %eax       // EAX = C++ templated interpreter function
    mov 16(%esp), %ebx       // EBX = DEX PC (callee save register)
    mov 8(%esp), %ecx        // ECX = Context argument for the function
    // Tell the unwinder the DEX PC lives in EBX (DWARF reg 3 on x86).
    CFI_DEFINE_DEX_PC_WITH_OFFSET(0 /* EAX */, 3 /* EBX */, 0)

    sub LITERAL(4), %esp     // Alignment padding (push ebx + this + arg push = 12
    CFI_ADJUST_CFA_OFFSET(4) // bytes, keeping ESP 16-byte aligned at the call).
    push %ecx                // Push argument
    CFI_ADJUST_CFA_OFFSET(4)
    call *%eax               // Call the wrapped function
    addl LITERAL(8), %esp    // Pop argument and alignment padding.
    CFI_ADJUST_CFA_OFFSET(-8)

    POP ebx                  // Restore EBX
    ret
END_FUNCTION ExecuteSwitchImplAsm
   2542 
    // TODO: implement these!
    // Declared via the UNIMPLEMENTED macro: emits a stub with no real x86
    // implementation yet (presumably aborts if reached -- confirm against
    // the macro definition in the support header).
UNIMPLEMENTED art_quick_memcmp16
   2545