Home | History | Annotate | Download | only in armv5te
      1 %verify "executed"
      2     /*
      3      * Array put, 64 bits.  vBB[vCC] <- vAA.
      4      *
      5      * Arrays of long/double are 64-bit aligned, so it's okay to use STRD.
      6      */
      7     /* aput-wide vAA, vBB, vCC */
    @ NOTE(review): %verify, %break, and ${opcode} are mterp template
    @ directives, not assembler syntax; the build scripts expand them.
    @ %break splits everything after it out of the fast-path handler slot
    @ into an out-of-line section (.L${opcode}_finish below).
      8     FETCH(r0, 1)                        @ r0<- CCBB
      9     mov     r9, rINST, lsr #8           @ r9<- AA
     10     and     r2, r0, #255                @ r2<- BB
     11     mov     r3, r0, lsr #8              @ r3<- CC
     12     GET_VREG(r0, r2)                    @ r0<- vBB (array object)
     13     GET_VREG(r1, r3)                    @ r1<- vCC (requested index)
     14     cmp     r0, #0                      @ null array object?
     15     beq     common_errNullObject        @ yes, bail
     16     ldr     r3, [r0, #offArrayObject_length]    @ r3<- arrayObj->length
    @ The element address is computed BEFORE the bounds check. This is safe:
    @ no memory is written until .L${opcode}_finish, which is reached only
    @ through the bcc below, after the index has been validated.
     17     add     r0, r0, r1, lsl #3          @ r0<- arrayObj + index*width
    @ Unsigned compare + bcc: a negative vCC appears as a huge unsigned value
    @ and therefore also fails the (index < length) test.
     18     cmp     r1, r3                      @ compare unsigned index, length
    @ Flags from the cmp stay live across this add (add without 's' does not
    @ touch the condition flags); r9 becomes the address of the source pair.
     19     add     r9, rFP, r9, lsl #2         @ r9<- &fp[AA]
     20     bcc     .L${opcode}_finish          @ okay, continue below
     21     b       common_errArrayIndex        @ index >= length, bail
     22     @ May want to swap the order of these two branches depending on how the
     23     @ branch prediction (if any) handles conditional forward branches vs.
     24     @ unconditional forward branches.
     25 %break
     26 
     27 .L${opcode}_finish:
    @ Out-of-line continuation, reached only after the bounds check passed.
    @ On entry: r0 = arrayObj + index*8 (element address before the contents
    @ offset is applied), r9 = &fp[AA] (address of the 64-bit source pair).
     28     FETCH_ADVANCE_INST(2)               @ advance rPC, load rINST
     29     ldmia   r9, {r2-r3}                 @ r2/r3<- vAA/vAA+1
     30     GET_INST_OPCODE(ip)                 @ extract opcode from rINST
    @ 64-bit store of the register pair into the element; STRD alignment is
    @ guaranteed per the header note. (Comment direction fixed: the store
    @ writes r2/r3 INTO vBB[vCC], matching "vBB[vCC] <- vAA" above.)
     31     strd    r2, [r0, #offArrayObject_contents]  @ vBB[vCC]<- r2/r3
     32     GOTO_OPCODE(ip)                     @ jump to next instruction
     33