    Searched refs: a0 (Results 51 - 75 of 843)


  /dalvik/vm/compiler/template/mips/
funop.S 4 * specifies an instruction that performs "result = op a0".
10 * a0 = target dalvik register address
16 move rOBJ, a0 # save a0
18 LOAD(a0, a1) # a0<- vBB
20 $instr # v0<- op, a0-a3 changed
funopWider.S 9 * a0 = target dalvik register address
13 move rOBJ, a0 # save a0
15 LOAD(a0, a1) # a0<- vB
17 $instr # result<- op, a0-a3 changed
TEMPLATE_MONITOR_ENTER.S 8 * a0 - self pointer
14 sw zero, offThread_inJitCodeCache(a0) # record that we're not returning
23 li a0, kHeavyweightMonitor
TEMPLATE_MONITOR_ENTER_DEBUG.S 7 * a0 - self pointer
14 sw zero, offThread_inJitCodeCache(a0) # record that we're not returning
21 sub a0, rPC, 2 # roll dPC back to this monitor instruction
26 li a0, kHeavyweightMonitor
funopNarrower.S 4 * that specifies an instruction that performs "result = op a0/a1", where
5 * "result" is a 32-bit quantity in a0.
14 * a0 = target dalvik register address
18 move rINST, a0 # save a0
21 LOAD64(rARG0, rARG1, t0) # a0/a1<- vB/vB+1
23 $instr # v0<- op, a0-a3 changed
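The funop / funopWider / funopNarrower templates above all share one load-operate-store shape: fetch the source Dalvik register(s) into a0 (or the a0/a1 pair), run the per-opcode instruction, and write the result back. A rough C analogue of the 32-bit case, with hypothetical names (do_unop and unary_op_fn are illustrative, not Dalvik's own):

    #include <stdint.h>

    typedef int32_t (*unary_op_fn)(int32_t);

    /* Sketch of the "result = op a0" template over a frame of 32-bit vregs. */
    static void do_unop(int32_t *fp, int vA, int vB, unary_op_fn op)
    {
        int32_t a0 = fp[vB];      /* LOAD(a0, a1)  # a0 <- vB                 */
        int32_t v0 = op(a0);      /* $instr        # v0 <- op, a0-a3 changed  */
        fp[vA] = v0;              /* store the result back into vA            */
    }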
  /dalvik/vm/mterp/mips/
OP_CONST_WIDE_16.S 3 FETCH_S(a0, 1) # a0 <- ssssBBBB (sign-extended)
5 sra a1, a0, 31 # a1 <- ssssssss
9 STORE64(a0, a1, a3) # vAA <- a0/a1
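OP_CONST_WIDE_16 widens a sign-extended 16-bit literal into a 64-bit register pair: the literal becomes the low word, and "sra a1, a0, 31" replicates the sign bit to form the high word. A minimal C sketch (it assumes arithmetic right shift of signed values, as MIPS sra provides):

    #include <stdint.h>

    static void const_wide_16(int32_t *fp, int vAA, int16_t literal)
    {
        int32_t lo = literal;     /* FETCH_S: ssssBBBB, already sign-extended */
        int32_t hi = lo >> 31;    /* sra a1, a0, 31: all zeros or all ones    */
        fp[vAA]     = lo;         /* STORE64: vAA   <- a0                     */
        fp[vAA + 1] = hi;         /*          vAA+1 <- a1                     */
    }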
OP_GOTO.S 10 sll a0, rINST, 16 # a0 <- AAxx0000
11 sra a1, a0, 24 # a1 <- ssssssAA (sign-extended)
19 lw a0, offThread_pJitProfTable(rSELF)
20 bltz a1, common_testUpdateProfile # (a0) check for trace hotness
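The first two OP_GOTO lines pull the signed 8-bit branch offset AA out of bits 15:8 of the instruction word by shifting it to the top of the register and arithmetic-shifting it back down, which sign-extends it in two instructions. A hedged C sketch (again assuming arithmetic right shift on signed int):

    #include <stdint.h>

    static int32_t goto_offset(uint32_t rINST)   /* rINST: AA in bits 15:8, opcode in 7:0 */
    {
        int32_t a0 = (int32_t)(rINST << 16);     /* sll a0, rINST, 16: AAxx0000 */
        int32_t a1 = a0 >> 24;                   /* sra a1, a0, 24: ssssssAA    */
        return a1;                               /* offset in 16-bit code units */
    }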
OP_LONG_TO_INT.S 3 GET_OPA4(a0) # a0 <- A from 11:8
10 SET_VREG_GOTO(a2, a0, t0) # fp[A] <- a2
OP_MOVE.S 5 GET_OPA4(a0) # a0 <- A from 11:8
9 SET_VREG_GOTO(a2, a0, t0) # fp[A] <- a2
OP_MOVE_16.S 5 FETCH(a0, 1) # a0 <- AAAA
9 SET_VREG_GOTO(a2, a0, t0) # fp[AAAA] <- a2 and jump
OP_MOVE_FROM16.S 5 GET_OPA(a0) # a0 <- AA
9 SET_VREG_GOTO(a2, a0, t0) # fp[AA] <- a2
OP_SHR_LONG.S 9 FETCH(a0, 1) # a0 <- CCBB
11 and a3, a0, 255 # a3 <- BB
12 srl a0, a0, 8 # a0 <- CC
14 GET_VREG(a2, a0) # a2 <- vCC
15 LOAD64(a0, a1, a3) # a0/a1 <- vBB/vBB+1
20 srl v0, a0, a2 # rlo<- alo >> (shift&31)
    [all...]
OP_USHR_LONG.S 9 FETCH(a0, 1) # a0 <- CCBB
11 and a3, a0, 255 # a3 <- BB
12 srl a0, a0, 8 # a0 <- CC
14 GET_VREG(a2, a0) # a2 <- vCC
15 LOAD64(a0, a1, a3) # a0/a1 <- vBB/vBB+1
20 srl v0, a0, a2 # rlo<- alo >> (shift&31)
    [all...]
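OP_SHR_LONG and OP_USHR_LONG implement a 64-bit shift using only 32-bit registers: each half is shifted by (shift & 31), the bits that cross the word boundary are merged back in, and a separate path handles shifts of 32 or more. A minimal C sketch of the logical (ushr) case on split halves:

    #include <stdint.h>

    static void ushr_long(uint32_t *rlo, uint32_t *rhi,
                          uint32_t alo, uint32_t ahi, uint32_t vCC)
    {
        uint32_t s = vCC & 63;                      /* only the low 6 shift bits matter */
        if (s == 0) {                               /* avoid an undefined shift by 32   */
            *rlo = alo;
            *rhi = ahi;
        } else if (s < 32) {
            *rlo = (alo >> s) | (ahi << (32 - s));  /* rlo <- alo >> (shift&31), plus hi bits carried down */
            *rhi = ahi >> s;
        } else {
            *rlo = ahi >> (s - 32);                 /* low word comes entirely from hi  */
            *rhi = 0;
        }
    }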
OP_CONST_WIDE.S 3 FETCH(a0, 1) # a0 <- bbbb (low)
7 or a0, a1 # a0 <- BBBBbbbb (low word)
15 STORE64(a0, a1, t1) # vAA <- a0/a1
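OP_CONST_WIDE assembles a 64-bit literal from four consecutive 16-bit code units; the "or a0, a1" line merges a low and a high half into one 32-bit word. A sketch with hypothetical unit names (only bbbb and BBBB appear in the excerpt above):

    #include <stdint.h>

    static void const_wide(int32_t *lo, int32_t *hi,
                           uint16_t bbbb, uint16_t BBBB,   /* low 32 bits  */
                           uint16_t hhhh, uint16_t HHHH)   /* high 32 bits */
    {
        *lo = (int32_t)(((uint32_t)BBBB << 16) | bbbb);    /* a0 <- BBBBbbbb (low word) */
        *hi = (int32_t)(((uint32_t)HHHH << 16) | hhhh);    /* a1 <- high word           */
    }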
OP_CONST_STRING_JUMBO.S 6 FETCH(a0, 1) # a0 <- bbbb (low)
12 or a1, a1, a0 # a1 <- BBBBbbbb
22 LOAD_rSELF_method(a0) # a0 <- self->method
23 LOAD_base_offMethod_clazz(a0, a0) # a0 <- method->clazz
OP_IPUT_WIDE_QUICK.S 4 GET_OPA4(a0) # a0 <- A(+)
7 EAS2(a3, rFP, a0) # a3 <- &fp[A]
8 LOAD64(a0, a1, a3) # a0/a1 <- fp[A]
13 addu a2, a2, a3 # obj.field (64 bits, aligned) <- a0/a1
14 STORE64(a0, a1, a2) # obj.field (64 bits, aligned) <- a0/a1
OP_THROW_VERIFICATION_ERROR.S 9 LOAD_rSELF_method(a0) # a0 <- self->method
OP_ARRAY_LENGTH.S 7 GET_VREG(a0, a1) # a0 <- vB (object ref)
9 beqz a0, common_errNullObject # yup, fail
11 LOAD_base_offArrayObject_length(a3, a0) # a3 <- array length
OP_CONST_WIDE_HIGH16.S 5 li a0, 0 # a0 <- 00000000
10 STORE64(a0, a1, a3) # vAA <- a0/a1
OP_IGET_WIDE.S 11 GET_OPB(a0) # a0 <- B
15 GET_VREG(rOBJ, a0) # rOBJ <- fp[B], the object pointer
16 LOAD_eas2(a0, a2, a1) # a0 <- resolved InstField ptr
18 bnez a0, .L${opcode}_finish # no, already resolved
21 LOAD_base_offMethod_clazz(a0, a2) # a0 <- method->clazz
24 move a0, v0
31 * a0 holds resolved field
    [all...]
OP_IPUT_WIDE.S 8 GET_OPB(a0) # a0 <- B
12 GET_VREG(rOBJ, a0) # rOBJ <- fp[B], the object pointer
13 LOAD_eas2(a0, a2, a1) # a0 <- resolved InstField ptr
15 bnez a0, .L${opcode}_finish # no, already resolved
18 LOAD_base_offMethod_clazz(a0, a2) # a0 <- method->clazz
21 move a0, v0
28 * a0 holds resolved field
    [all...]
OP_MOVE_RESULT_WIDE.S 6 LOAD64(a0, a1, a3) # a0/a1 <- retval.j
9 STORE64(a0, a1, a2) # fp[AA] <- a0/a1
  /external/clang/test/CodeGen/
x86_32-arguments-win32.c 4 // CHECK: define void @f1_2(%struct.s1* byval align 4 %a0)
10 void f1_2(struct s1 a0) {}
39 void f5_2(struct s5 a0) {}
42 // CHECK: define void @f6_2(%struct.s6* byval align 4 %a0)
47 void f6_2(struct s6 a0) {}
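The CHECK lines in x86_32-arguments-win32.c document that clang lowers a struct passed by value on 32-bit Windows into a hidden byval pointer: the caller materializes a copy and passes its address. A source-level example in the spirit of that test (the struct layout here is only an assumption):

    struct s_example { int x, y; };

    void take_by_value(struct s_example a0)   /* IR: %struct.s_example* byval */
    {
        (void)a0;
    }

    void caller(void)
    {
        struct s_example s = { 1, 2 };
        take_by_value(s);   /* the caller copies s and passes the copy's address */
    }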
libcalls.c 6 void test_sqrt(float a0, double a1, long double a2) {
13 float l0 = sqrtf(a0);
33 void test_pow(float a0, double a1, long double a2) {
36 float l0 = powf(a0, a0);
56 void test_fma(float a0, double a1, long double a2) {
59 float l0 = fmaf(a0, a0, a0);
  /external/libvpx/libvpx/vp8/common/mips/dspr2/
reconinter_dspr2.c 34 unsigned int a0, a1, a2, a3; local
43 "ulw %[a0], 0(%[src]) \n\t"
47 "sw %[a0], 0(%[dst]) \n\t"
51 : [a0] "=&r" (a0), [a1] "=&r" (a1),
69 unsigned int a0, a1; local
78 "ulw %[a0], 0(%[src]) \n\t"
80 "sw %[a0], 0(%[dst]) \n\t"
82 : [a0] "=&r" (a0), [a1] "=&r" (a1)
99 unsigned int a0, a1; local
    [all...]
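The reconinter_dspr2.c hits copy pixel rows with an unaligned load (ulw) followed by an aligned store (sw) inside GCC extended inline asm; the "=&r" constraints mark a0/a1 as early-clobber scratch registers so the compiler never allocates them on top of an input operand. A minimal stand-alone sketch in that style (MIPS-only; the function name is illustrative):

    #include <stdint.h>

    /* Copy 4 bytes from a possibly unaligned source to an aligned destination. */
    static void copy4_unaligned(const uint8_t *src, uint8_t *dst)
    {
        unsigned int a0;
        __asm__ __volatile__(
            "ulw %[a0], 0(%[src])    \n\t"   /* unaligned 32-bit load */
            "sw  %[a0], 0(%[dst])    \n\t"   /* aligned 32-bit store  */
            : [a0] "=&r"(a0)
            : [src] "r"(src), [dst] "r"(dst)
            : "memory");
    }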
