
Lines Matching refs:Rn

341         int s, int Rd, int Rn, uint32_t Op2)
398 case opADD: *mPC++ = A64_ADD_W(Rd, Rn, Rm, shift, amount); break;
399 case opAND: *mPC++ = A64_AND_W(Rd, Rn, Rm, shift, amount); break;
400 case opORR: *mPC++ = A64_ORR_W(Rd, Rn, Rm, shift, amount); break;
401 case opMVN: *mPC++ = A64_ORN_W(Rd, Rn, Rm, shift, amount); break;
402 case opSUB: *mPC++ = A64_SUB_W(Rd, Rn, Rm, shift, amount, s); break;
409 int s, int Rd, int Rn, uint32_t Op2)
420 dataProcessingCommon(opcode, s, Wd, Rn, Op2);
424 dataProcessingCommon(opSUB, 1, mTmpReg3, Rn, Op2);
428 dataProcessingCommon(opSUB, s, Wd, Rn, Op2);
446 dataProcessingCommon(opAND, s, Wd, Rn, mTmpReg3);
464 int s, int Rd, int Rn, uint32_t Op2)
474 *mPC++ = A64_ADD_X_Wm_SXTW(Rd, Rn, Rm, amount);
480 *mPC++ = A64_ADD_X_Wm_SXTW(Rd, Rn, Rm, amount);
490 *mPC++ = A64_ADD_X_Wm_SXTW(Rd, Rn, Rm, amount);
499 int s, int Rd, int Rn, uint32_t Op2)
508 *mPC++ = A64_SUB_X_Wm_SXTW(Rd, Rn, mTmpReg1, 0);
519 void ArmToArm64Assembler::MLA(int cc, int s, int Rd, int Rm, int Rs, int Rn)
523 *mPC++ = A64_MADD_W(Rd, Rm, Rs, Rn);
565 void ArmToArm64Assembler::BX(int /*cc*/, int /*Rn*/){
578 int Rd, int Rn, uint32_t op_type, uint32_t size)
581 if(Rn == SP)
582 Rn = XSP;
598 addrReg = Rn;
612 *mPC++ = A64_CSEL_X(Rn, mTmpReg2, Rn, cc);
618 *mPC++ = A64_LDRSTR_Wm_SXTW_0(op, size, Rd, Rn, mAddrMode.reg_offset);
625 *mPC++ = A64_LDRSTR_Wm_SXTW_0(op, size, Rd, Rn, mZeroReg);
634 void ArmToArm64Assembler::ADDR_LDR(int cc, int Rd, int Rn, uint32_t op_type)
636 return dataTransfer(opLDR, cc, Rd, Rn, op_type, 64);
638 void ArmToArm64Assembler::ADDR_STR(int cc, int Rd, int Rn, uint32_t op_type)
640 return dataTransfer(opSTR, cc, Rd, Rn, op_type, 64);
642 void ArmToArm64Assembler::LDR(int cc, int Rd, int Rn, uint32_t op_type)
644 return dataTransfer(opLDR, cc, Rd, Rn, op_type);
646 void ArmToArm64Assembler::LDRB(int cc, int Rd, int Rn, uint32_t op_type)
648 return dataTransfer(opLDRB, cc, Rd, Rn, op_type);
650 void ArmToArm64Assembler::STR(int cc, int Rd, int Rn, uint32_t op_type)
652 return dataTransfer(opSTR, cc, Rd, Rn, op_type);
655 void ArmToArm64Assembler::STRB(int cc, int Rd, int Rn, uint32_t op_type)
657 return dataTransfer(opSTRB, cc, Rd, Rn, op_type);
660 void ArmToArm64Assembler::LDRH(int cc, int Rd, int Rn, uint32_t op_type)
662 return dataTransfer(opLDRH, cc, Rd, Rn, op_type);
664 void ArmToArm64Assembler::LDRSB(int /*cc*/, int /*Rd*/, int /*Rn*/, uint32_t /*offset*/)
668 void ArmToArm64Assembler::LDRSH(int /*cc*/, int /*Rd*/, int /*Rn*/, uint32_t /*offset*/)
673 void ArmToArm64Assembler::STRH(int cc, int Rd, int Rn, uint32_t op_type)
675 return dataTransfer(opSTRH, cc, Rd, Rn, op_type);
682 int Rn, int W, uint32_t reg_list)
685 if(cc != AL || dir != IA || W == 0 || Rn != SP)
703 int Rn, int W, uint32_t reg_list)
706 if(cc != AL || dir != DB || W == 0 || Rn != SP)
726 void ArmToArm64Assembler::SWP(int /*cc*/, int /*Rn*/, int /*Rd*/, int /*Rm*/)
730 void ArmToArm64Assembler::SWPB(int /*cc*/, int /*Rn*/, int /*Rd*/, int /*Rm*/)
742 void ArmToArm64Assembler::PLD(int /*Rn*/, uint32_t /*offset*/) {
751 void ArmToArm64Assembler::QADD(int /*cc*/, int /*Rd*/, int /*Rm*/, int /*Rn*/)
756 void ArmToArm64Assembler::QDADD(int /*cc*/, int /*Rd*/, int /*Rm*/, int /*Rn*/)
761 void ArmToArm64Assembler::QSUB(int /*cc*/, int /*Rd*/, int /*Rm*/, int /*Rn*/)
766 void ArmToArm64Assembler::QDSUB(int /*cc*/, int /*Rd*/, int /*Rm*/, int /*Rn*/)
810 void ArmToArm64Assembler::SMLA(int cc, int xy, int Rd, int Rm, int Rs, int Rn)
817 *mPC++ = A64_MADD_W(Rd, mTmpReg1, mTmpReg2, Rn);
828 int /*Rd*/, int /*Rm*/, int /*Rs*/, int /*Rn*/)
852 void ArmToArm64Assembler::UBFX(int cc, int Rd, int Rn, int lsb, int width)
855 *mPC++ = A64_UBFM_W(Rd, Rn, lsb, lsb + width - 1);
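
The UBFX lowering at line 855 is just UBFM with immr = lsb and imms = lsb + width - 1, using the 32-bit UBFM layout at line 1223. A minimal sketch of that mapping (the helper name is illustrative, not the file's):

    #include <cstdint>

    // Sketch: UBFX Wd, Wn, #lsb, #width expressed as UBFM, mirroring lines 855 and 1223.
    // The helper name is illustrative; only the bit layout is taken from the matched lines.
    constexpr uint32_t ubfx_w(uint32_t Rd, uint32_t Rn, uint32_t lsb, uint32_t width)
    {
        uint32_t immr = lsb;              // field starts at bit "lsb"
        uint32_t imms = lsb + width - 1;  // field ends at bit "lsb + width - 1"
        return (0x53u << 24) | (immr << 16) | (imms << 10) | (Rn << 5) | Rd;
    }

    // Example: UBFX W0, W1, #8, #8  ->  UBFM W0, W1, #8, #15  ->  0x53083C20
    static_assert(ubfx_w(0, 1, 8, 8) == 0x53083C20, "UBFX W0, W1, #8, #8");
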
1004 uint32_t Rn, uint32_t Rm)
1009 dataTransferOpName[op], Rt, Rn, Rm);
1010 return(dataTransferOpCode[op] | (Rm << 16) | (Rn << 5) | Rt);
1015 dataTransferOpName[op], Rt, Rn, Rm);
1016 return(dataTransferOpCode[op] | (0x1<<30) | (Rm<<16) | (Rn<<5)|Rt);
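
Lines 1009-1016 form the register-offset load/store used at lines 618 and 625: the Wm offset register is sign-extended (SXTW) with no shift, and OR-ing in (0x1 << 30) at line 1016 widens the access from 32 to 64 bits. A minimal sketch of the 32-bit LDR case; the base word is assumed from the generic A64 LDR (register offset) layout, since dataTransferOpCode[op] itself is not part of the matched lines:

    #include <cstdint>

    // Sketch of the register-offset form behind lines 1010/1016.  The base word below
    // is assumed from the generic A64 "LDR Wt, [Xn, Wm, SXTW]" layout; the file takes
    // it from dataTransferOpCode[op], which is not in the matched lines.
    constexpr uint32_t ldr_w_reg_sxtw(uint32_t Rt, uint32_t Rn, uint32_t Rm)
    {
        const uint32_t base = 0xB860C800;   // assumed LDR-W, SXTW, no-shift skeleton
        return base | (Rm << 16) | (Rn << 5) | Rt;
    }

    // Example: LDR W0, [X1, W2, SXTW]  ->  0xB862C820
    static_assert(ldr_w_reg_sxtw(0, 1, 2) == 0xB862C820, "LDR W0, [X1, W2, SXTW]");
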
1021 uint32_t Rn, int32_t simm)
1023 if(Rn == 31)
1026 LOG_INSTR("STR W%d, [X%d, #%d]!\n", Rt, Rn, simm);
1029 return (0xB8 << 24) | (imm9 << 12) | (0x3 << 10) | (Rn << 5) | Rt;
1033 uint32_t Rn, int32_t simm)
1035 if(Rn == 31)
1038 LOG_INSTR("LDR W%d, [X%d], #%d\n",Rt, Rn, simm);
1042 (imm9 << 12) | (0x1 << 10) | (Rn << 5) | Rt;
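
Lines 1021-1042 are the immediate-offset pair used for stack traffic: a pre-indexed store (bits [11:10] = 0b11 at line 1029) and a post-indexed load (bits [11:10] = 0b01 at line 1042), both taking a 9-bit signed offset and treating Rn == 31 as SP. A minimal sketch of the store, assuming imm9 is simply the offset masked to 9 bits (the masking line is not among the matches):

    #include <cstdint>

    // Sketch of the pre-indexed 32-bit store behind line 1029.  The imm9 masking is an
    // assumption; only the OR-terms are taken from the matched line.
    constexpr uint32_t str_w_preindex(uint32_t Rt, uint32_t Rn, int32_t simm)
    {
        uint32_t imm9 = static_cast<uint32_t>(simm) & 0x1FF;   // 9-bit two's complement
        return (0xB8u << 24) | (imm9 << 12) | (0x3u << 10) | (Rn << 5) | Rt;
    }

    // Example: STR W0, [X29, #-4]!  ->  0xB81FCFA0
    static_assert(str_w_preindex(0, 29, -4) == 0xB81FCFA0, "STR W0, [X29, #-4]!");
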
1046 uint32_t Rn,
1050 LOG_INSTR("ADD X%d, X%d, W%d, SXTW #%d\n", Rd, Rn, Rm, amount);
1052 (0x6 << 13) | (amount << 10) | (Rn << 5) | Rd);
1057 uint32_t Rn,
1061 LOG_INSTR("SUB X%d, X%d, W%d, SXTW #%d\n", Rd, Rn, Rm, amount);
1063 (0x6 << 13) | (amount << 10) | (Rn << 5) | Rd);
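
Lines 1046-1063 handle the mixed-width address arithmetic: the W-register operand is sign-extended (SXTW) before the 64-bit add or subtract, with (0x6 << 13) selecting the SXTW option and "amount" giving the post-extension left shift. A minimal sketch of the add; the 0x8B prefix and bit 21 are assumed from the generic A64 ADD (extended register) layout, as they are not part of the matched lines:

    #include <cstdint>

    // Sketch of ADD Xd, Xn, Wm, SXTW #amount.  The (0x6 << 13) option and the low
    // OR-terms mirror line 1052; the 0x8B/bit-21 prefix is an assumption taken from
    // the generic A64 ADD (extended register) encoding.
    constexpr uint32_t add_x_wm_sxtw(uint32_t Rd, uint32_t Rn, uint32_t Rm, uint32_t amount)
    {
        return (0x8Bu << 24) | (0x1u << 21) | (Rm << 16) |
               (0x6u << 13) | (amount << 10) | (Rn << 5) | Rd;
    }

    // Example: ADD X0, X1, W2, SXTW #2  ->  0x8B22C820
    static_assert(add_x_wm_sxtw(0, 1, 2, 2) == 0x8B22C820, "ADD X0, X1, W2, SXTW #2");
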
1073 uint32_t ArmToArm64Assembler::A64_ADD_X(uint32_t Rd, uint32_t Rn,
1078 Rd, Rn, Rm, shift_codes[shift], amount);
1080 (amount << 10) |(Rn << 5) | Rd);
1082 uint32_t ArmToArm64Assembler::A64_ADD_IMM_X(uint32_t Rd, uint32_t Rn,
1085 LOG_INSTR("ADD X%d, X%d, #%d, LSL #%d\n", Rd, Rn, imm, shift);
1086 return (0x91 << 24) | ((shift/12) << 22) | (imm << 10) | (Rn << 5) | Rd;
1089 uint32_t ArmToArm64Assembler::A64_SUB_IMM_X(uint32_t Rd, uint32_t Rn,
1092 LOG_INSTR("SUB X%d, X%d, #%d, LSL #%d\n", Rd, Rn, imm, shift);
1093 return (0xD1 << 24) | ((shift/12) << 22) | (imm << 10) | (Rn << 5) | Rd;
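
Lines 1082-1093 are the 12-bit immediate forms; (shift/12) folds the only legal shifts, LSL #0 and LSL #12, into the single "sh" bit. A minimal sketch of the add (helper name illustrative):

    #include <cstdint>

    // Sketch of the 64-bit ADD-immediate form on line 1086.  (shift / 12) maps
    // LSL #0 / LSL #12 onto the "sh" bit; imm must fit in 12 bits.
    constexpr uint32_t add_imm_x(uint32_t Rd, uint32_t Rn, uint32_t imm, uint32_t shift)
    {
        return (0x91u << 24) | ((shift / 12) << 22) | (imm << 10) | (Rn << 5) | Rd;
    }

    // Example: ADD X0, X1, #16  ->  0x91004020
    static_assert(add_imm_x(0, 1, 16, 0) == 0x91004020, "ADD X0, X1, #16");
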
1096 uint32_t ArmToArm64Assembler::A64_ADD_W(uint32_t Rd, uint32_t Rn,
1101 Rd, Rn, Rm, shift_codes[shift], amount);
1103 (amount << 10) |(Rn << 5) | Rd);
1106 uint32_t ArmToArm64Assembler::A64_SUB_W(uint32_t Rd, uint32_t Rn,
1114 Rd, Rn, Rm, shift_codes[shift], amount);
1116 (amount << 10) |(Rn << 5) | Rd);
1121 Rd, Rn, Rm, shift_codes[shift], amount);
1123 (amount << 10) |(Rn << 5) | Rd);
1127 uint32_t ArmToArm64Assembler::A64_AND_W(uint32_t Rd, uint32_t Rn,
1132 Rd, Rn, Rm, shift_codes[shift], amount);
1134 (amount << 10) |(Rn << 5) | Rd);
1137 uint32_t ArmToArm64Assembler::A64_ORR_W(uint32_t Rd, uint32_t Rn,
1142 Rd, Rn, Rm, shift_codes[shift], amount);
1144 (amount << 10) |(Rn << 5) | Rd);
1147 uint32_t ArmToArm64Assembler::A64_ORN_W(uint32_t Rd, uint32_t Rn,
1152 Rd, Rn, Rm, shift_codes[shift], amount);
1154 (amount << 10) |(Rn << 5) | Rd);
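
The A64_ADD_W/SUB_W/AND_W/ORR_W/ORN_W helpers between lines 1096 and 1154 all share the 32-bit shifted-register layout: Rm at bit 16, the shift amount at bit 10, Rn at bit 5. A minimal sketch of the ADD case; the 0x0B top byte is assumed from the generic A64 ADD (shifted register) encoding, as only the low OR-terms appear in the matched lines:

    #include <cstdint>

    // Sketch of the shared 32-bit shifted-register form (lines 1096-1154).  The 0x0B
    // top byte is an assumption from the generic A64 ADD (shifted register) encoding.
    constexpr uint32_t add_w_shifted(uint32_t Rd, uint32_t Rn, uint32_t Rm,
                                     uint32_t shift, uint32_t amount)
    {
        return (0x0Bu << 24) | (shift << 22) | (Rm << 16) |
               (amount << 10) | (Rn << 5) | Rd;
    }

    // Example: ADD W0, W1, W2 (no shift)  ->  0x0B020020
    static_assert(add_w_shifted(0, 1, 2, 0, 0) == 0x0B020020, "ADD W0, W1, W2");
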
1157 uint32_t ArmToArm64Assembler::A64_CSEL_X(uint32_t Rd, uint32_t Rn,
1160 LOG_INSTR("CSEL X%d, X%d, X%d, %s\n", Rd, Rn, Rm, cc_codes[cond]);
1161 return ((0x9A << 24)|(0x1 << 23)|(Rm << 16) |(cond << 12)| (Rn << 5) | Rd);
1164 uint32_t ArmToArm64Assembler::A64_CSEL_W(uint32_t Rd, uint32_t Rn,
1167 LOG_INSTR("CSEL W%d, W%d, W%d, %s\n", Rd, Rn, Rm, cc_codes[cond]);
1168 return ((0x1A << 24)|(0x1 << 23)|(Rm << 16) |(cond << 12)| (Rn << 5) | Rd);
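
CSEL is what keeps the writeback at line 612 conditional: the base register only picks up the updated address when the condition holds. Lines 1161 and 1168 give the X and W encodings. A minimal sketch of the X form, using the standard A64 condition numbering (EQ = 0):

    #include <cstdint>

    // Sketch of the 64-bit CSEL form on line 1161; cond uses the usual A64
    // condition numbering (EQ = 0).  Helper name is illustrative.
    constexpr uint32_t csel_x(uint32_t Rd, uint32_t Rn, uint32_t Rm, uint32_t cond)
    {
        return (0x9Au << 24) | (0x1u << 23) | (Rm << 16) |
               (cond << 12) | (Rn << 5) | Rd;
    }

    // Example: CSEL X0, X1, X2, EQ  ->  0x9A820020
    static_assert(csel_x(0, 1, 2, 0) == 0x9A820020, "CSEL X0, X1, X2, EQ");
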
1171 uint32_t ArmToArm64Assembler::A64_RET(uint32_t Rn)
1173 LOG_INSTR("RET X%d\n", Rn);
1174 return ((0xD6 << 24) | (0x1 << 22) | (0x1F << 16) | (Rn << 5));
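
Line 1174 is the function-return encoding; with the usual link register it reproduces the canonical RET word:

    #include <cstdint>

    // Sketch of line 1174: RET branches to the register held in bits [9:5].
    constexpr uint32_t ret(uint32_t Rn)
    {
        return (0xD6u << 24) | (0x1u << 22) | (0x1Fu << 16) | (Rn << 5);
    }

    // Example: RET X30 (the default link register)  ->  0xD65F03C0
    static_assert(ret(30) == 0xD65F03C0, "RET X30");
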
1198 uint32_t ArmToArm64Assembler::A64_SMADDL(uint32_t Rd, uint32_t Rn,
1201 LOG_INSTR("SMADDL X%d, W%d, W%d, X%d\n",Rd, Rn, Rm, Ra);
1202 return ((0x9B << 24) | (0x1 << 21) | (Rm << 16)|(Ra << 10)|(Rn << 5) | Rd);
1205 uint32_t ArmToArm64Assembler::A64_MADD_W(uint32_t Rd, uint32_t Rn,
1208 LOG_INSTR("MADD W%d, W%d, W%d, W%d\n",Rd, Rn, Rm, Ra);
1209 return ((0x1B << 24) | (Rm << 16) | (Ra << 10) |(Rn << 5) | Rd);
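
MLA (line 523) and SMLA (line 817) both end up as MADD Wd = Wn * Wm + Wa (line 1209), while SMADDL (line 1202) is the widening 32x32-bit multiply with a 64-bit accumulate. A minimal sketch of the 32-bit form:

    #include <cstdint>

    // Sketch of the 32-bit MADD form on line 1209: Wd = Wn * Wm + Wa.  This is what
    // MLA (line 523) and SMLA (line 817) are lowered to.
    constexpr uint32_t madd_w(uint32_t Rd, uint32_t Rn, uint32_t Rm, uint32_t Ra)
    {
        return (0x1Bu << 24) | (Rm << 16) | (Ra << 10) | (Rn << 5) | Rd;
    }

    // Example: MADD W0, W1, W2, W3  ->  0x1B020C20
    static_assert(madd_w(0, 1, 2, 3) == 0x1B020C20, "MADD W0, W1, W2, W3");
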
1212 uint32_t ArmToArm64Assembler::A64_SBFM_W(uint32_t Rd, uint32_t Rn,
1215 LOG_INSTR("SBFM W%d, W%d, #%d, #%d\n", Rd, Rn, immr, imms);
1216 return ((0x13 << 24) | (immr << 16) | (imms << 10) | (Rn << 5) | Rd);
1219 uint32_t ArmToArm64Assembler::A64_UBFM_W(uint32_t Rd, uint32_t Rn,
1222 LOG_INSTR("UBFM W%d, W%d, #%d, #%d\n", Rd, Rn, immr, imms);
1223 return ((0x53 << 24) | (immr << 16) | (imms << 10) | (Rn << 5) | Rd);
1226 uint32_t ArmToArm64Assembler::A64_UBFM_X(uint32_t Rd, uint32_t Rn,
1229 LOG_INSTR("UBFM X%d, X%d, #%d, #%d\n", Rd, Rn, immr, imms);
1231 (immr << 16) | (imms << 10) | (Rn << 5) | Rd);
1234 uint32_t ArmToArm64Assembler::A64_EXTR_W(uint32_t Rd, uint32_t Rn,
1237 LOG_INSTR("EXTR W%d, W%d, W%d, #%d\n", Rd, Rn, Rm, lsb);
1238 return (0x13 << 24)|(0x1 << 23) | (Rm << 16) | (lsb << 10)|(Rn << 5) | Rd;
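
Line 1238 is the 32-bit EXTR, which extracts a 32-bit window from the Wn:Wm register pair starting at bit "lsb" of Wm. A minimal sketch:

    #include <cstdint>

    // Sketch of the 32-bit EXTR form on line 1238: extract 32 bits from the Wn:Wm
    // pair, starting at bit "lsb" of Wm.
    constexpr uint32_t extr_w(uint32_t Rd, uint32_t Rn, uint32_t Rm, uint32_t lsb)
    {
        return (0x13u << 24) | (0x1u << 23) | (Rm << 16) |
               (lsb << 10) | (Rn << 5) | Rd;
    }

    // Example: EXTR W0, W1, W2, #8  ->  0x13822020
    static_assert(extr_w(0, 1, 2, 8) == 0x13822020, "EXTR W0, W1, W2, #8");
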