Home | Sort by relevance | Sort by last modified time
    Searched refs:Lsl (Results 1 - 25 of 26) sorted by null

1 2

  /external/vixl/examples/aarch64/
simulated-runtime-calls.cc 66 __ Lsl(w0, w0, 2);
  /art/compiler/utils/arm/
assembler_arm_vixl.h 94 WITH_FLAGS_DONT_CARE_RD_RN_OP(Lsl);
  /external/swiftshader/third_party/subzero/src/DartARM32/
assembler_arm.h 302 Shift shift = LSL, uint32_t shift_imm = 0, Mode am = Offset) {
305 if ((shift == LSL) && (shift_imm == 0)) {
    [all...]
assembler_arm.cc     [all...]
  /external/vixl/test/aarch32/
test-disasm-a32.cc 467 COMPARE_A32(Orn(r0, r1, Operand(r2, LSL, 1)),
468 "mvn r0, r2, lsl #1\n"
478 COMPARE_A32(Orn(r0, r1, Operand(r2, LSL, r3)),
479 "mvn r0, r2, lsl r3\n"
481 COMPARE_T32(Orn(r0, r1, Operand(r2, LSL, r3)),
482 "lsl r0, r2, r3\n"
571 COMPARE_T32(Rsc(r0, r1, Operand(r2, LSL, 1)),
573 "adc r0, r2, lsl #1\n");
591 COMPARE_T32(Rsc(r0, r1, Operand(r2, LSL, r3)),
592 "lsl r0, r2, r3\n
    [all...]
test-simulator-cond-rd-rn-operand-rm-a32.cc 146 M(Lsl) \
    [all...]
test-simulator-cond-rd-rn-operand-rm-t32.cc 146 M(Lsl) \
    [all...]
test-assembler-aarch32.cc 240 __ Adc(r5, r0, Operand(r1, LSL, 30));
268 __ Adc(r6, r0, Operand(r1, LSL, 30));
337 __ Adcs(r3, r2, Operand(r1, LSL, 4));
484 __ Add(r5, r0, Operand(r1, LSL, 8));
522 __ And(r4, r0, Operand(r1, LSL, 4));
571 __ Ands(r0, r0, Operand(r1, LSL, 4));
750 __ Lsl(r3, r1, 4);
776 __ Lsl(r3, r1, r9);
937 __ Bic(r4, r0, Operand(r1, LSL, 4));
986 __ Bics(r0, r0, Operand(r1, LSL, 4))
    [all...]
  /external/v8/src/arm64/
macro-assembler-arm64-inl.h 901 void MacroAssembler::Lsl(const Register& rd,
906 lsl(rd, rn, shift);
910 void MacroAssembler::Lsl(const Register& rd,
    [all...]
macro-assembler-arm64.h 486 inline void Lsl(const Register& rd, const Register& rn, unsigned shift);
487 inline void Lsl(const Register& rd, const Register& rn, const Register& rm);
    [all...]
code-stubs-arm64.cc 28 __ Mov(x5, Operand(x0, LSL, kPointerSizeLog2));
131 __ Lsl(result, mantissa, exponent);
901 __ Add(temp_argv, jssp, Operand(x0, LSL, kPointerSizeLog2));
    [all...]
  /art/compiler/linker/arm/
relative_patcher_thumb2.cc 137 // LDR (register) with correct base_reg, S=1 and option=011 (LDR Wt, [Xn, Xm, LSL #2]).
267 __ Ldr(ip, MemOperand(base_reg, ip, LSL, 2)); // Load the reference.
344 __ Lsl(root_reg, ip, LockWord::kForwardingAddressShift);
  /art/compiler/optimizing/
code_generator_arm_vixl.cc     [all...]
intrinsics_arm64.cc     [all...]
code_generator_arm64.cc     [all...]
intrinsics_arm_vixl.cc 144 __ Add(base, array, Operand(RegisterFrom(pos), vixl32::LSL, element_size_shift));
166 __ Add(end, base, Operand(RegisterFrom(copy_length), vixl32::LSL, element_size_shift));
    [all...]
  /external/swiftshader/third_party/subzero/src/
IceInstARM32.h 166 /// shift-by-immediate instructions (lsl, lsr, and asr), and shift-by-immediate
398 Lsl,
    [all...]
IceInstARM32.cpp 593 Asm->lsl(getDest(), getSrc(0), getSrc(1), SetFlags, getPredicate());
    [all...]
  /art/compiler/linker/arm64/
relative_patcher_arm64.cc 326 // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
327 // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
432 __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2)); // Load the reference.
487 __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
496 __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
  /external/v8/src/compiler/arm64/
code-generator-arm64.cc 107 return Operand(InputRegister32(index), LSL, InputInt5(index + 1));
137 return Operand(InputRegister64(index), LSL, InputInt6(index + 1));
178 LSL, InputInt32(index + 2));
    [all...]
  /external/v8/src/builtins/arm64/
builtins-arm64.cc 150 __ Peek(x2, Operand(x4, LSL, kPointerSizeLog2));
232 __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
278 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
355 __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
429 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
618 __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
730 __ Ldr(x3, MemOperand(x3, x0, LSL, kPointerSizeLog2));
887 __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
942 __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));
    [all...]
  /external/v8/src/crankshaft/arm64/
lithium-codegen-arm64.cc     [all...]
  /external/vixl/src/aarch32/
macro-assembler-aarch32.h     [all...]
  /external/vixl/src/aarch64/
macro-assembler-aarch64.h     [all...]
  /external/vixl/test/aarch64/
test-assembler-aarch64.cc 317 __ Mvn(w2, Operand(w0, LSL, 1));
318 __ Mvn(x3, Operand(x1, LSL, 2));
492 __ Mov(w13, Operand(w11, LSL, 1));
493 __ Mov(x14, Operand(x12, LSL, 2));
550 __ Mov(w13, Operand(w11, LSL, 1));
559 __ Mov(x23, Operand(x12, LSL, 1));
601 __ Orr(w3, w0, Operand(w1, LSL, 28));
602 __ Orr(x4, x0, Operand(x1, LSL, 32));
695 __ Orn(w3, w0, Operand(w1, LSL, 4));
696 __ Orn(x4, x0, Operand(x1, LSL, 4))
    [all...]

Completed in 782 milliseconds

1 2