    Searched refs:Asr (Results 1 - 15 of 15)

  /art/compiler/utils/
assembler_thumb_test.cc 413 __ mov(R3, ShifterOperand(R4, ASR, 6));
420 __ mov(R8, ShifterOperand(R4, ASR, 6));
    [all...]
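Both hits use ASR not as a stand-alone shift instruction but as the shifter operand of a plain mov. A minimal sketch of that pattern, assuming a Thumb2Assembler* named assembler and the usual "__" shorthand for assembler-> (both assumptions, not shown in the results):

    #define __ assembler->
    __ mov(R3, ShifterOperand(R4, ASR, 6));   // R3 = R4 ASR #6 (sign-propagating shift)
    __ mov(R8, ShifterOperand(R4, ASR, 6));   // high destination register, same shifted operand
    #undef __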
  /art/compiler/utils/arm/
assembler_arm.h 587 virtual void Asr(Register rd, Register rm, uint32_t shift_imm, bool setcc = false,
598 virtual void Asr(Register rd, Register rm, Register rn, bool setcc = false,
assembler_arm32.h 204 void Asr(Register rd, Register rm, uint32_t shift_imm, bool setcc = false,
215 void Asr(Register rd, Register rm, Register rn, bool setcc = false,
assembler_thumb2.h 235 void Asr(Register rd, Register rm, uint32_t shift_imm, bool setcc = false,
246 void Asr(Register rd, Register rm, Register rn, bool setcc = false,
assembler_arm32.cc     [all...]
assembler_thumb2.cc 844 case ASR: thumb_opcode = 0b10; break;
1099 case ASR: opcode = 0b10; break;
1119 case ASR: opcode = 0b10; break;
    [all...]
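The arm32 and thumb2 assemblers declare two Asr overloads: an immediate-shift form (shift_imm) and a register-shift form, each with an optional setcc flag; the trailing parameter is cut off in the listing and is left at its default below. A hedged usage sketch, with the assembler reference assumed:

    // Sketch only; "assembler" is an assumed arm::Thumb2Assembler (or Arm32Assembler).
    assembler.Asr(R0, R1, 4);         // R0 = R1 ASR #4, setcc defaults to false
    assembler.Asr(R0, R1, 4, true);   // same shift, but update the condition flags
    assembler.Asr(R2, R3, R4);        // register form: shift amount taken from R4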
  /external/chromium_org/v8/src/compiler/arm64/
code-generator-arm64.cc 289 ASSEMBLE_SHIFT(Asr, 64);
292 ASSEMBLE_SHIFT(Asr, 32);
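ASSEMBLE_SHIFT(Asr, 64) and ASSEMBLE_SHIFT(Asr, 32) instantiate the shift lowering for 64-bit and 32-bit inputs. The macro body is not part of the results; the sketch below only illustrates the dispatch such a lowering has to make between the immediate and register Asr overloads (names and structure are illustrative, not V8's):

    // Illustrative dispatch only, not V8's ASSEMBLE_SHIFT macro.
    void EmitAsr64(MacroAssembler* masm, const Register& rd, const Register& rn,
                   bool shift_is_immediate, unsigned imm, const Register& rm) {
      if (shift_is_immediate) {
        masm->Asr(rd, rn, imm);   // immediate shift amount
      } else {
        masm->Asr(rd, rn, rm);    // shift amount supplied in a register
      }
    }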
  /external/chromium_org/v8/src/arm64/
macro-assembler-arm64-inl.h 313 void MacroAssembler::Asr(const Register& rd,
318 asr(rd, rn, shift);
322 void MacroAssembler::Asr(const Register& rd,
    [all...]
macro-assembler-arm64.h 293 inline void Asr(const Register& rd, const Register& rn, unsigned shift);
294 inline void Asr(const Register& rd, const Register& rn, const Register& rm);
    [all...]
lithium-codegen-arm64.cc     [all...]
macro-assembler-arm64.cc     [all...]
full-codegen-arm64.cc     [all...]
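macro-assembler-arm64 declares the same two overloads; the inline immediate form simply forwards to the raw asr instruction. A hedged usage sketch, assuming a MacroAssembler* named masm and the usual "#define __ masm->" shorthand:

    #define __ masm->
    __ Asr(x0, x1, 4);     // 64-bit: x0 = x1 ASR #4
    __ Asr(w2, w3, 12);    // 32-bit variant through W registers
    __ Asr(x4, x5, x6);    // register overload declared above: shift amount from x6
    #undef __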
  /external/vixl/src/a64/
macro-assembler-a64.h 348 void Asr(const Register& rd, const Register& rn, unsigned shift) {
352 asr(rd, rn, shift);
354 void Asr(const Register& rd, const Register& rn, const Register& rm) {
    [all...]
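VIXL's helper has the same shape and forwards to asr(rd, rn, shift). Since ASR is the arithmetic (sign-propagating) right shift, a small host-side illustration of the value it produces (plain C++; arithmetic behaviour of >> on negative signed values is assumed, as common compilers provide and C++20 guarantees):

    #include <cassert>
    #include <cstdint>

    int main() {
      // What Asr(rd, rn, 3) computes for a negative input: the sign bit is
      // replicated into the vacated high bits, so the result stays negative.
      int64_t rn = -24;
      int64_t rd = rn >> 3;      // arithmetic shift right by 3
      assert(rd == -3);          // -24 ASR 3 == -3, unlike a logical shift

      // The same idea on a raw bit pattern, showing the sign replication.
      uint64_t bits = 0xF000000000000000u;
      int64_t shifted = static_cast<int64_t>(bits) >> 4;
      assert(static_cast<uint64_t>(shifted) == 0xFF00000000000000u);
      return 0;
    }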
  /external/chromium_org/v8/test/cctest/
test-assembler-arm64.cc 301 __ Mvn(w6, Operand(w0, ASR, 11));
302 __ Mvn(x7, Operand(x1, ASR, 12));
374 __ Mov(w19, Operand(w11, ASR, 11));
375 __ Mov(x20, Operand(x12, ASR, 12));
530 __ Orr(w6, w0, Operand(w1, ASR, 4));
531 __ Orr(x7, x0, Operand(x1, ASR, 4));
627 __ Orn(w6, w0, Operand(w1, ASR, 1));
628 __ Orn(x7, x0, Operand(x1, ASR, 1));
696 __ And(w6, w0, Operand(w1, ASR, 20));
697 __ And(x7, x0, Operand(x1, ASR, 20))
    [all...]
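In the tests ASR appears as the shifted second operand of move and logical instructions, e.g. Orr(w6, w0, Operand(w1, ASR, 4)) computes w6 = w0 | (w1 ASR #4). A host-side sketch of that computation with illustrative values (the values are assumptions, not the ones the test loads):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Mirrors Orr(w6, w0, Operand(w1, ASR, 4)); values are illustrative only.
      int32_t w1 = static_cast<int32_t>(0x80000000);       // negative, so ASR fills with ones
      uint32_t w0 = 0x0000000Fu;
      uint32_t w6 = w0 | static_cast<uint32_t>(w1 >> 4);   // arithmetic >> assumed for signed w1
      assert(w6 == 0xF800000Fu);                           // 0x80000000 ASR 4 == 0xF8000000
      return 0;
    }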
  /external/vixl/test/
test-assembler-a64.cc 266 __ Mvn(w6, Operand(w0, ASR, 11));
267 __ Mvn(x7, Operand(x1, ASR, 12));
432 __ Mov(w19, Operand(w11, ASR, 11));
433 __ Mov(x20, Operand(x12, ASR, 12));
490 __ Orr(w6, w0, Operand(w1, ASR, 4));
491 __ Orr(x7, x0, Operand(x1, ASR, 4));
579 __ Orn(w6, w0, Operand(w1, ASR, 1));
580 __ Orn(x7, x0, Operand(x1, ASR, 1));
646 __ And(w6, w0, Operand(w1, ASR, 20));
647 __ And(x7, x0, Operand(x1, ASR, 20))
    [all...]

Completed in 928 milliseconds