Home | Sort by relevance | Sort by last modified time
    Searched refs:Asr (Results 1 - 18 of 18) sorted by relevance

  /art/compiler/utils/
assembler_thumb_test.cc 342 __ mov(R3, ShifterOperand(R4, ASR, 6));
349 __ mov(R8, ShifterOperand(R4, ASR, 6));
    [all...]
  /art/compiler/utils/arm/
assembler_arm32.h 209 void Asr(Register rd, Register rm, uint32_t shift_imm, bool setcc = false,
220 void Asr(Register rd, Register rm, Register rn, bool setcc = false,
assembler_thumb2.h 248 void Asr(Register rd, Register rm, uint32_t shift_imm, bool setcc = false,
259 void Asr(Register rd, Register rm, Register rn, bool setcc = false,
assembler_arm.h 626 virtual void Asr(Register rd, Register rm, uint32_t shift_imm, bool setcc = false,
637 virtual void Asr(Register rd, Register rm, Register rn, bool setcc = false,
assembler_arm32.cc     [all...]
assembler_thumb2.cc 727 // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
919 case ASR: thumb_opcode = 2U /* 0b10 */; break;
    [all...]
  /external/v8/src/compiler/arm64/
code-generator-arm64.cc 289 ASSEMBLE_SHIFT(Asr, 64);
292 ASSEMBLE_SHIFT(Asr, 32);
  /external/v8/src/arm64/
macro-assembler-arm64-inl.h 313 void MacroAssembler::Asr(const Register& rd,
318 asr(rd, rn, shift);
322 void MacroAssembler::Asr(const Register& rd,
    [all...]
macro-assembler-arm64.h 293 inline void Asr(const Register& rd, const Register& rn, unsigned shift);
294 inline void Asr(const Register& rd, const Register& rn, const Register& rm);
    [all...]
lithium-codegen-arm64.cc     [all...]
macro-assembler-arm64.cc     [all...]
full-codegen-arm64.cc     [all...]
  /art/compiler/optimizing/
intrinsics_arm.cc 278 __ Asr(mask, in_reg_hi, 31);
287 __ Asr(mask, in_reg, 31);
    [all...]
code_generator_arm.cc     [all...]
code_generator_arm64.cc     [all...]
  /external/v8/test/cctest/
test-assembler-arm64.cc 301 __ Mvn(w6, Operand(w0, ASR, 11));
302 __ Mvn(x7, Operand(x1, ASR, 12));
374 __ Mov(w19, Operand(w11, ASR, 11));
375 __ Mov(x20, Operand(x12, ASR, 12));
530 __ Orr(w6, w0, Operand(w1, ASR, 4));
531 __ Orr(x7, x0, Operand(x1, ASR, 4));
627 __ Orn(w6, w0, Operand(w1, ASR, 1));
628 __ Orn(x7, x0, Operand(x1, ASR, 1));
696 __ And(w6, w0, Operand(w1, ASR, 20));
697 __ And(x7, x0, Operand(x1, ASR, 20));
    [all...]
  /external/vixl/src/vixl/a64/
macro-assembler-a64.h 938 void Asr(const Register& rd, const Register& rn, unsigned shift) {
943 asr(rd, rn, shift);
945 void Asr(const Register& rd, const Register& rn, const Register& rm)
    [all...]
  /external/vixl/test/
test-assembler-a64.cc 294 __ Mvn(w6, Operand(w0, ASR, 11));
295 __ Mvn(x7, Operand(x1, ASR, 12));
467 __ Mov(w19, Operand(w11, ASR, 11));
468 __ Mov(x20, Operand(x12, ASR, 12));
525 __ Orr(w6, w0, Operand(w1, ASR, 4));
526 __ Orr(x7, x0, Operand(x1, ASR, 4));
619 __ Orn(w6, w0, Operand(w1, ASR, 1));
620 __ Orn(x7, x0, Operand(x1, ASR, 1));
686 __ And(w6, w0, Operand(w1, ASR, 20));
687 __ And(x7, x0, Operand(x1, ASR, 20));
    [all...]

Completed in 409 milliseconds