/external/v8/src/regexp/arm64/ |
regexp-macro-assembler-arm64.cc | 289 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSizeInBits); 450 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSizeInBits); 868 __ Add(input_length, start_offset(), Operand(w10, LSR, 1)); 878 __ Lsr(capture_end.X(), capture_start.X(), kWRegSizeInBits); [all...] |
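The repeated shift by kWRegSizeInBits (32) in these hits reads as unpacking: a single x register appears to hold two 32-bit capture offsets, with a logical shift right by 32 recovering the high half (line 878 even names the halves capture_start and capture_end). A minimal plain-C++ sketch of that arithmetic, assuming the packed layout:

    #include <cassert>
    #include <cstdint>

    int main() {
      const unsigned kWRegSizeInBits = 32;          // assumed to match V8's constant
      uint64_t packed = (uint64_t{70} << 32) | 42;  // high half 70, low half 42
      uint64_t high = packed >> kWRegSizeInBits;    // what Lsr(x11, reg, 32) computes
      uint32_t low = static_cast<uint32_t>(packed); // low half: plain truncation
      assert(high == 70 && low == 42);
    }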
/art/compiler/utils/arm/ |
assembler_arm.h | 826 virtual void Lsr(Register rd, Register rm, uint32_t shift_imm, 830 Lsr(rd, rm, shift_imm, cond, kCcSet); 861 virtual void Lsr(Register rd, Register rm, Register rn, 865 Lsr(rd, rm, rn, cond, kCcSet); [all...] |
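These hits show ART's two Lsr overloads (shift by immediate, shift by register), each taking an explicit flag-setting parameter, plus a forwarding call that pins kCcSet. A hedged sketch of that delegation pattern, with illustrative names rather than ART's real types:

    #include <cstdint>
    #include <cstdio>

    enum Condition { AL };
    enum SetCc { kCcSet, kCcKeep };

    struct AssemblerSketch {
      // Core form: immediate shift with explicit condition and flag behavior.
      virtual void Lsr(int rd, int rm, uint32_t imm, Condition, SetCc s) {
        std::printf("lsr%s r%d, r%d, #%u\n", s == kCcSet ? "s" : "", rd, rm, imm);
      }
      // Flag-setting convenience, mirroring the kCcSet forwarding at line 830
      // (wrapper name is an assumption).
      void Lsrs(int rd, int rm, uint32_t imm, Condition cond = AL) {
        Lsr(rd, rm, imm, cond, kCcSet);
      }
      virtual ~AssemblerSketch() = default;
    };

    int main() {
      AssemblerSketch a;
      a.Lsrs(3, 4, 5);  // prints: lsrs r3, r4, #5
    }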
assembler_arm32.h | 220 virtual void Lsr(Register rd, Register rm, uint32_t shift_imm, 231 virtual void Lsr(Register rd, Register rm, Register rn, |
assembler_thumb2.h | 267 virtual void Lsr(Register rd, Register rm, uint32_t shift_imm, 278 virtual void Lsr(Register rd, Register rm, Register rn, [all...] |
assembler_arm32.cc | [all...] |
assembler_thumb2.cc | [all...] |
/art/compiler/utils/ |
assembler_thumb_test.cc | 438 __ movs(R3, ShifterOperand(R4, LSR, 5)); 449 __ mov(R3, ShifterOperand(R4, LSR, 5), AL, kCcKeep); 456 __ movs(R8, ShifterOperand(R4, LSR, 5)); 470 __ Lsr(R3, R4, 5); 481 __ Lsr(R3, R4, 5, AL, kCcKeep); [all...] |
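The pairing here is instructive: the high-level Lsr(R3, R4, 5) at line 470 is expected to assemble to the same thing as the explicit movs(R3, ShifterOperand(R4, LSR, 5)) at line 438, since ARM encodes shift-by-immediate as a MOV with a shifter operand; the kCcKeep variants differ only in whether flags are updated. The shift semantics themselves, in plain C++:

    #include <cassert>
    #include <cstdint>

    int main() {
      uint32_t r4 = 0xFFFFFFE0u;
      uint32_t lsr = r4 >> 5;  // LSR #5: zero-fill from the left
      // ASR shown for contrast: sign-fill (arithmetic shift on mainstream compilers).
      uint32_t asr = static_cast<uint32_t>(static_cast<int32_t>(r4) >> 5);
      assert(lsr == 0x07FFFFFFu);
      assert(asr == 0xFFFFFFFFu);
    }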
/art/compiler/optimizing/ |
code_generator_arm.cc | [all...] |
code_generator_arm64.cc | [all...] |
/external/llvm/lib/Transforms/InstCombine/ |
InstCombineInternal.h | 522 Value *SimplifyShrShlDemandedBits(Instruction *Lsr, Instruction *Sftl, |
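The helper name suggests demanded-bits reasoning about a logical shift right feeding a shift left. One identity such a simplification can lean on (illustrative only, not a claim about LLVM's exact transform) is that shr-then-shl by the same amount is just a low-bit mask:

    #include <cassert>
    #include <cstdint>

    int main() {
      uint32_t x = 0xDEADBEEFu;
      for (unsigned c = 0; c < 32; ++c) {
        // (x >> c) << c clears the low c bits, i.e. masks with ~((1 << c) - 1).
        assert(((x >> c) << c) == (x & ~((1u << c) - 1u)));
      }
    }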
/external/v8/src/compiler/arm64/ |
code-generator-arm64.cc | 109 return Operand(InputRegister32(index), LSR, InputInt5(index + 1)); 137 return Operand(InputRegister64(index), LSR, InputInt6(index + 1)); [all...] |
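The 32-bit helper reads its shift amount as an InputInt5 and the 64-bit helper as an InputInt6, which matches the encoding: a w-register shift fits in 5 bits (0..31), an x-register shift in 6 (0..63). A small check of that range logic (helper names below are illustrative):

    #include <cassert>
    #include <cstdint>

    uint32_t lsr32(uint32_t rn, unsigned imm5) { return rn >> (imm5 & 31u); }
    uint64_t lsr64(uint64_t rn, unsigned imm6) { return rn >> (imm6 & 63u); }

    int main() {
      assert(lsr32(0x80000000u, 31) == 1u);              // max w-reg shift
      assert(lsr64(0x8000000000000000ull, 63) == 1ull);  // max x-reg shift
    }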
/external/v8/src/arm64/ |
macro-assembler-arm64-inl.h | 919 void MacroAssembler::Lsr(const Register& rd, 924 lsr(rd, rn, shift); 928 void MacroAssembler::Lsr(const Register& rd, [all...] |
macro-assembler-arm64.h | 488 inline void Lsr(const Register& rd, const Register& rn, unsigned shift); 489 inline void Lsr(const Register& rd, const Register& rn, const Register& rm); [all...] |
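Lines 488-489 declare the two forms whose inline definitions appear in the entry above: shift by an immediate and shift by a register. For the register form, my reading of the ARM64 ISA (LSRV) is that the shift amount is taken modulo the register width; the masks below mimic that:

    #include <cassert>
    #include <cstdint>

    int main() {
      uint64_t x1 = 0x8000000000000000ull;
      assert((x1 >> 4) == 0x0800000000000000ull);  // immediate form, Lsr(rd, rn, 4)
      uint64_t shift_reg = 68;                     // register form: 68 mod 64 == 4
      assert((x1 >> (shift_reg & 63u)) == 0x0800000000000000ull);
    }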
macro-assembler-arm64.cc | 465 return Operand(dst, LSR, shift_high); [all...] |
builtins-arm64.cc | [all...] |
code-stubs-arm64.cc | 881 __ Lsr(exponent_abs, exponent_abs, 1); [all...] |
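An Lsr by 1 on an exponent halves it each iteration, the signature step of exponentiation by squaring; that this stub implements exactly that loop is an assumption. A plain-C++ rendering of the idea:

    #include <cassert>
    #include <cstdint>

    int64_t ipow(int64_t base, uint64_t exp) {
      int64_t result = 1;
      while (exp != 0) {
        if (exp & 1) result *= base;  // low bit set: fold one factor in
        base *= base;                 // square for the next bit
        exp >>= 1;                    // the Lsr(exponent_abs, exponent_abs, 1) step
      }
      return result;
    }

    int main() { assert(ipow(3, 5) == 243); }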
/external/v8/test/cctest/ |
test-assembler-arm64.cc | 304 __ Mvn(w4, Operand(w0, LSR, 3)); 305 __ Mvn(x5, Operand(x1, LSR, 4)); 377 __ Mov(w15, Operand(w11, LSR, 3)); 378 __ Mov(x18, Operand(x12, LSR, 4)); 534 __ Orr(x5, x0, Operand(x1, LSR, 4)); 631 __ Orn(x5, x0, Operand(x1, LSR, 1)); 700 __ And(x5, x0, Operand(x1, LSR, 1)); 775 __ Ands(w0, w0, Operand(w1, LSR, 4)); 829 __ Bic(x5, x0, Operand(x1, LSR, 1)); 915 __ Bics(w0, w0, Operand(w0, LSR, 1)) [all...] |
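Here LSR appears not as a standalone instruction but as an operand modifier folded into the data-processing instruction itself: Orr(x5, x0, Operand(x1, LSR, 4)) computes x5 = x0 | (x1 >> 4) in one instruction, and Bic(x5, x0, Operand(x1, LSR, 1)) computes x5 = x0 & ~(x1 >> 1). The equivalent arithmetic:

    #include <cassert>
    #include <cstdint>

    int main() {
      uint64_t x0 = 0x0F0ull, x1 = 0xF00ull;
      assert((x0 | (x1 >> 4)) == 0x0F0ull);   // Orr with LSR #4 operand
      assert((x0 & ~(x1 >> 1)) == 0x070ull);  // Bic with LSR #1 operand
    }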
/external/v8/src/crankshaft/arm64/ |
lithium-codegen-arm64.cc | [all...] |
/external/vixl/src/vixl/a64/ |
macro-assembler-a64.h | [all...] |
/external/vixl/test/ |
test-assembler-a64.cc | 294 __ Mvn(w4, Operand(w0, LSR, 3)); 295 __ Mvn(x5, Operand(x1, LSR, 4)); 467 __ Mov(w15, Operand(w11, LSR, 3)); 468 __ Mov(x18, Operand(x12, LSR, 4)); 526 __ Orr(x5, x0, Operand(x1, LSR, 4)); 620 __ Orn(x5, x0, Operand(x1, LSR, 1)); 687 __ And(x5, x0, Operand(x1, LSR, 1)); 760 __ Ands(w0, w0, Operand(w1, LSR, 4)); 813 __ Bic(x5, x0, Operand(x1, LSR, 1)); 897 __ Bics(w0, w0, Operand(w0, LSR, 1)) [all...] |
/external/v8/src/full-codegen/arm64/ |
full-codegen-arm64.cc | [all...] |