Lines Matching defs:cond in src/arm/macro-assembler-arm.cc (V8 ARM macro assembler)

75 void MacroAssembler::Jump(Register target, Condition cond) {
77 bx(target, cond);
79 mov(pc, Operand(target), LeaveCC, cond);
85 Condition cond) {
88 bx(ip, cond);
90 mov(pc, Operand(target, rmode), LeaveCC, cond);
96 Condition cond) {
98 Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
103 Condition cond) {
106 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
110 int MacroAssembler::CallSize(Register target, Condition cond) {
119 void MacroAssembler::Call(Register target, Condition cond) {
125 blx(target, cond);
128 mov(lr, Operand(pc), LeaveCC, cond);
129 mov(pc, Operand(target), LeaveCC, cond);
131 ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
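
The mov lr, pc / mov pc, target fallback above forms a call on cores without blx because reading pc in ARM state yields the address of the current instruction plus 8, so lr ends up pointing just past the second mov. A minimal standalone sketch of that arithmetic (the addresses are invented for illustration, not taken from the listing):

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t kInstrSize = 4;                    // ARM instructions are 4 bytes
      uint32_t mov_lr_pc = 0x1000;                      // address of "mov lr, pc"
      uint32_t mov_pc_target = mov_lr_pc + kInstrSize;  // address of "mov pc, target"
      uint32_t lr = mov_lr_pc + 8;                      // pc reads as current instruction + 8
      assert(lr == mov_pc_target + kInstrSize);         // return lands right after the call
      return 0;
    }
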
136 Address target, RelocInfo::Mode rmode, Condition cond) {
138 Instr mov_instr = cond | MOV | LeaveCC;
149 Condition cond) {
166 blx(ip, cond);
171 mov(lr, Operand(pc), LeaveCC, cond);
172 // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
173 mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond);
176 ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start));
183 Condition cond) {
184 return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
191 Condition cond) {
200 Call(reinterpret_cast<Address>(code.location()), rmode, cond);
201 ASSERT_EQ(CallSize(code, rmode, ast_id, cond),
206 void MacroAssembler::Ret(Condition cond) {
208 bx(lr, cond);
210 mov(pc, Operand(lr), LeaveCC, cond);
215 void MacroAssembler::Drop(int count, Condition cond) {
217 add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
222 void MacroAssembler::Ret(int drop, Condition cond) {
223 Drop(drop, cond);
224 Ret(cond);
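
Drop above discards count stack slots by adding count * kPointerSize to sp (the ARM stack grows downward), and Ret(drop, cond) simply chains Drop and Ret under the same condition. A small sketch of the slot arithmetic, assuming the 4-byte kPointerSize of 32-bit ARM:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t kPointerSize = 4;            // pointer width on 32-bit ARM
      uint32_t sp = 0x8000 - 3 * kPointerSize;    // three values currently on the stack
      int count = 3;
      sp += count * kPointerSize;                 // Drop(3): pop the slots without reading them
      assert(sp == 0x8000);
      return 0;
    }
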
231 Condition cond) {
233 eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
234 eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
235 eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
237 mov(scratch, reg1, LeaveCC, cond);
238 mov(reg1, reg2, LeaveCC, cond);
239 mov(reg2, scratch, LeaveCC, cond);
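
The Swap shown above uses the three-eor exchange when no scratch register is available and three conditional moves otherwise. A standalone sketch of both paths, assuming the two registers are distinct (the eor trick would zero a register swapped with itself):

    #include <cassert>
    #include <cstdint>

    // Exchange without a temporary; reg1 and reg2 must not alias.
    void SwapNoScratch(uint32_t& reg1, uint32_t& reg2) {
      reg1 ^= reg2;
      reg2 ^= reg1;
      reg1 ^= reg2;
    }

    // Exchange through a scratch value, mirroring the mov/mov/mov path.
    void SwapWithScratch(uint32_t& reg1, uint32_t& reg2, uint32_t& scratch) {
      scratch = reg1;
      reg1 = reg2;
      reg2 = scratch;
    }

    int main() {
      uint32_t a = 1, b = 2, t = 0;
      SwapNoScratch(a, b);
      assert(a == 2 && b == 1);
      SwapWithScratch(a, b, t);
      assert(a == 1 && b == 2);
      return 0;
    }
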
260 void MacroAssembler::Move(Register dst, Register src, Condition cond) {
262 mov(dst, src, LeaveCC, cond);
277 Condition cond) {
281 mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond);
288 WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
291 and_(dst, src1, src2, LeaveCC, cond);
297 Condition cond) {
301 and_(dst, src1, Operand(mask), LeaveCC, cond);
303 mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
306 ubfx(dst, src1, lsb, width, cond);
312 Condition cond) {
316 and_(dst, src1, Operand(mask), LeaveCC, cond);
320 mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
323 mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
326 sbfx(dst, src1, lsb, width, cond);
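
On cores without the ARMv7 bitfield instructions, Ubfx falls back to a mask plus a logical shift right and Sbfx to a shift-up/arithmetic-shift-down pair, as the fragments above show. A minimal sketch of the same arithmetic on 32-bit values (an illustration, not the exact V8 fallback, and it assumes two's-complement arithmetic right shifts):

    #include <cassert>
    #include <cstdint>

    // Unsigned bitfield extract: width bits starting at lsb, zero-extended.
    uint32_t Ubfx(uint32_t src, int lsb, int width) {
      uint32_t mask = (1u << (lsb + width)) - 1u - ((1u << lsb) - 1u);  // ones over [lsb, lsb+width)
      return (src & mask) >> lsb;
    }

    // Signed bitfield extract: move the field to the top, then shift back arithmetically.
    int32_t Sbfx(uint32_t src, int lsb, int width) {
      int shift_up = 32 - lsb - width;
      int shift_down = 32 - width;
      return static_cast<int32_t>(src << shift_up) >> shift_down;
    }

    int main() {
      assert(Ubfx(0xABCD1234u, 8, 8) == 0x12u);
      assert(Sbfx(0x0000F800u, 8, 8) == -8);   // the field 0xF8 sign-extends to -8
      return 0;
    }
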
336 Condition cond) {
349 bfi(dst, src, lsb, width, cond);
354 void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
360 bfc(dst, lsb, width, cond);
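
Without bfi/bfc support, a bitfield insert becomes a clear-then-or: mask the destination field to zero, align the source bits, and or them in; bfc is the same clear with nothing inserted. A sketch of the mask arithmetic, hedged as an illustration rather than the exact V8 fallback:

    #include <cassert>
    #include <cstdint>

    // Insert the low "width" bits of src into dst at bit position lsb.
    uint32_t Bfi(uint32_t dst, uint32_t src, int lsb, int width) {
      uint32_t mask = ((1u << width) - 1u) << lsb;      // ones over the target field
      return (dst & ~mask) | ((src << lsb) & mask);
    }

    // Clear "width" bits of dst starting at lsb.
    uint32_t Bfc(uint32_t dst, int lsb, int width) {
      uint32_t mask = ((1u << width) - 1u) << lsb;
      return dst & ~mask;
    }

    int main() {
      assert(Bfi(0xFFFFFFFFu, 0x5u, 4, 4) == 0xFFFFFF5Fu);
      assert(Bfc(0xFFFFFFFFu, 4, 4) == 0xFFFFFF0Fu);
      return 0;
    }
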
366 Condition cond) {
379 if (cond != al) {
380 b(NegateCondition(cond), &done); // Skip saturate if !condition.
391 usat(dst, satpos, src, cond);
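
Usat clamps a signed input into the unsigned range [0, 2^satpos - 1]; when the hardware instruction is unavailable the listing falls back to an explicit compare-and-clamp, first branching over the whole sequence with the negated condition if cond is not al. A standalone sketch of the clamp itself:

    #include <cassert>
    #include <cstdint>

    // Saturate a signed value into [0, (1 << satpos) - 1], as usat does.
    uint32_t Usat(int32_t src, int satpos) {
      int32_t max = (1 << satpos) - 1;
      if (src < 0) return 0u;
      if (src > max) return static_cast<uint32_t>(max);
      return static_cast<uint32_t>(src);
    }

    int main() {
      assert(Usat(300, 8) == 255);   // clamps above 2^8 - 1
      assert(Usat(-7, 8) == 0);      // negative inputs saturate to zero
      assert(Usat(42, 8) == 42);     // in-range values pass through
      return 0;
    }
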
398 Condition cond) {
399 ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
405 Condition cond) {
406 str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
425 Condition cond,
427 ASSERT(cond == eq || cond == ne);
430 b(cond, branch);
664 const MemOperand& src, Condition cond) {
677 ldrd(dst1, dst2, src, cond);
683 ldr(dst2, src2, cond);
684 ldr(dst1, src, cond);
686 ldr(dst1, src, cond);
687 ldr(dst2, src2, cond);
692 ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
693 ldr(dst1, src, cond);
697 ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
698 ldr(dst2, src2, cond);
706 const MemOperand& dst, Condition cond) {
719 strd(src1, src2, dst, cond);
724 str(src1, dst, cond);
725 str(src2, dst2, cond);
729 str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
730 str(src2, dst2, cond);
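
When ldrd/strd are unavailable, the pair is split into two 32-bit accesses and the order matters: if the base register is also the first destination, the second word is loaded first (or a post-indexed load is used) so the address is not clobbered before the second access. A small simulation of that hazard with a toy register file and memory (names and layout are illustrative only, not V8 code):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    uint32_t reg[16];    // toy register file
    uint8_t mem[64];     // toy memory

    uint32_t Load(uint32_t addr) {
      uint32_t v;
      std::memcpy(&v, mem + addr, sizeof(v));
      return v;
    }

    // Load a register pair from [reg[rn]]; reading the second word first
    // keeps the base address intact when rd1 aliases rn.
    void LoadPair(int rd1, int rd2, int rn) {
      if (rd1 == rn) {
        reg[rd2] = Load(reg[rn] + 4);
        reg[rd1] = Load(reg[rn]);
      } else {
        reg[rd1] = Load(reg[rn]);
        reg[rd2] = Load(reg[rn] + 4);
      }
    }

    int main() {
      uint32_t lo = 1, hi = 2;
      std::memcpy(mem + 8, &lo, sizeof(lo));
      std::memcpy(mem + 12, &hi, sizeof(hi));
      reg[0] = 8;                           // base register doubles as the low-word destination
      LoadPair(/*rd1=*/0, /*rd2=*/1, /*rn=*/0);
      assert(reg[0] == 1 && reg[1] == 2);   // the wrong ordering would clobber the base first
      return 0;
    }
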
738 const Condition cond) {
739 vmrs(scratch, cond);
740 bic(scratch, scratch, Operand(bits_to_clear), LeaveCC, cond);
741 vmsr(scratch, cond);
747 const Condition cond) {
749 VFPCompareAndLoadFlags(src1, src2, pc, cond);
754 const Condition cond) {
756 VFPCompareAndLoadFlags(src1, src2, pc, cond);
763 const Condition cond) {
765 vcmp(src1, src2, cond);
766 vmrs(fpscr_flags, cond);
772 const Condition cond) {
774 vcmp(src1, src2, cond);
775 vmrs(fpscr_flags, cond);
780 const Condition cond) {
787 vmov(dst, kDoubleRegZero, cond);
789 vneg(dst, kDoubleRegZero, cond);
791 vmov(dst, imm, cond);
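
Vmov special-cases an immediate of 0.0 or -0.0 so it can reuse kDoubleRegZero (copied directly, or negated for minus zero) instead of materializing a full 64-bit constant. Telling the two zeros apart requires the sign bit, since they compare equal; a sketch of that check:

    #include <cassert>
    #include <cmath>

    int main() {
      double plus_zero = 0.0;
      double minus_zero = -0.0;
      assert(plus_zero == minus_zero);     // ordinary comparison cannot distinguish them
      assert(!std::signbit(plus_zero));    //  0.0 -> vmov(dst, kDoubleRegZero)
      assert(std::signbit(minus_zero));    // -0.0 -> vneg(dst, kDoubleRegZero)
      return 0;
    }
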
2128 void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
2130 Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond);
2134 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
2136 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
2748 void MacroAssembler::Assert(Condition cond, const char* msg) {
2750 Check(cond, msg);
2786 void MacroAssembler::Check(Condition cond, const char* msg) {
2788 b(cond, &L);
3756 void CodePatcher::EmitCondition(Condition cond) {
3758 instr = (instr & ~kCondMask) | cond;
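
EmitCondition rewrites the 4-bit condition field that every ARM instruction carries in bits 28-31: the old field is masked out with kCondMask and the new condition, already shifted into place in V8's Condition encoding, is or-ed in. A standalone sketch of the same bit surgery, with the instruction word and encodings spelled out as assumptions:

    #include <cassert>
    #include <cstdint>

    const uint32_t kCondMask = 0xFu << 28;   // bits 28-31 hold the condition
    const uint32_t kAl = 0xEu << 28;         // "always"
    const uint32_t kEq = 0x0u << 28;         // "equal"

    // Replace the condition field of an encoded instruction, as EmitCondition does.
    uint32_t PatchCondition(uint32_t instr, uint32_t cond) {
      return (instr & ~kCondMask) | cond;
    }

    int main() {
      uint32_t mov_r0_r1 = 0xE1A00001u;                   // mov r0, r1, unconditional
      assert((mov_r0_r1 & kCondMask) == kAl);
      uint32_t patched = PatchCondition(mov_r0_r1, kEq);  // now moveq r0, r1
      assert((patched & kCondMask) == kEq);
      assert((patched & ~kCondMask) == (mov_r0_r1 & ~kCondMask));  // rest of the encoding untouched
      return 0;
    }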