
Lines Matching full:lsl

308 mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
334 mov(scratch, Operand(scratch, LSL, lsb));
363 DCHECK((src.shift_op() == ASR) || (src.shift_op() == LSL));
1157 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
1452 ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2)); // Smi-tagged offset.
1624 add(t0, scratch, Operand(t0, LSL, 15));
1628 add(t0, t0, Operand(t0, LSL, 2));
1632 mov(scratch, Operand(t0, LSL, 11));
1633 add(t0, t0, Operand(t0, LSL, 3));
1688 add(t2, t2, Operand(t2, LSL, 1)); // t2 = t2 * 3
1691 add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
1920 add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
1971 mov(scratch1, Operand(length, LSL, 1)); // Length in bytes, not chars.
3138 Operand(scratch1, LSL, kPointerSizeLog2 + 1));
3158 Operand(scratch, LSL, kPointerSizeLog2 + 1));
3611 orr(result, result, Operand(ip, LSL, 12));
3613 orr(result, result, Operand(scratch, LSL, 16));
3735 add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
3737 mov(mask_reg, Operand(ip, LSL, mask_reg));
3767 // LSL may overflow, making the check conservative.
3768 tst(load_scratch, Operand(mask_scratch, LSL, 1));
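
All of the hits above use ARM's flexible second operand: the second source register is logically shifted left (LSL) before it is used, so scaling an index by the pointer size or multiplying by a small constant costs no extra instruction. Below is a minimal C++ sketch of the arithmetic behind two of the patterns, assuming 32-bit values and kPointerSizeLog2 == 2; the variable and function names are illustrative, not taken from the source file.

#include <cstdint>

// Plain-arithmetic equivalent of two shifted-operand patterns seen above.
// Names are illustrative; kPointerSizeLog2 is assumed to be 2 (32-bit pointers).
constexpr unsigned kPointerSizeLog2 = 2;

// add(t2, t2, Operand(t2, LSL, 1));                      // t2 = t2 * 3
// add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
uint32_t EntryAddress(uint32_t elements, uint32_t index) {
  index += index << 1;                            // index *= 3 via shift-and-add
  return elements + (index << kPointerSizeLog2);  // scale index to a byte offset
}

// add(t0, scratch, Operand(t0, LSL, 15));
// add(t0, t0, Operand(t0, LSL, 2));
uint32_t MixStep(uint32_t seed, uint32_t hash) {
  hash = seed + (hash << 15);  // one add folds in a 15-bit shift of the addend
  hash += hash << 2;           // equivalent to hash *= 5
  return hash;
}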