Home | History | Annotate | Download | only in AArch64
      1 ; RUN: llc -verify-machineinstrs < %s -mtriple=aarch64-none-linux-gnu | FileCheck %s --check-prefix=CHECK
      2 
      3 @var32 = global i32 0
      4 @var64 = global i64 0
      5 
      6 define void @test_extendb(i8 %var) {
      7 ; CHECK-LABEL: test_extendb:
        ; In-register extension of an i8 argument. Sign-extensions to i32 and i64
        ; should select the sxtb alias (one instruction each); zero-extensions are
        ; expected as an AND with the #0xff mask instead. The volatile stores to
        ; @var32/@var64 keep each extended value live so nothing is folded away.
      8 
      9   %sxt32 = sext i8 %var to i32
     10   store volatile i32 %sxt32, i32* @var32
     11 ; CHECK: sxtb {{w[0-9]+}}, {{w[0-9]+}}
     12 
     13   %sxt64 = sext i8 %var to i64
     14   store volatile i64 %sxt64, i64* @var64
     15 ; CHECK: sxtb {{x[0-9]+}}, {{w[0-9]+}}
     16 
     17 ; N.b. this doesn't actually produce a bitfield instruction at the
     18 ; moment, but it's still a good test to have and the semantics are
     19 ; correct.
     20   %uxt32 = zext i8 %var to i32
     21   store volatile i32 %uxt32, i32* @var32
     22 ; CHECK: and {{w[0-9]+}}, {{w[0-9]+}}, #0xff
     23 
     24   %uxt64 = zext i8 %var to i64
     25   store volatile i64 %uxt64, i64* @var64
     26 ; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xff
     27   ret void
     28 }
     29 
     30 define void @test_extendh(i16 %var) {
     31 ; CHECK-LABEL: test_extendh:
        ; i16 counterpart of test_extendb: sext to i32/i64 should select the sxth
        ; alias, while zext should come out as an AND with the #0xffff mask.
     32 
     33   %sxt32 = sext i16 %var to i32
     34   store volatile i32 %sxt32, i32* @var32
     35 ; CHECK: sxth {{w[0-9]+}}, {{w[0-9]+}}
     36 
     37   %sxt64 = sext i16 %var to i64
     38   store volatile i64 %sxt64, i64* @var64
     39 ; CHECK: sxth {{x[0-9]+}}, {{w[0-9]+}}
     40 
     41 ; N.b. this doesn't actually produce a bitfield instruction at the
     42 ; moment, but it's still a good test to have and the semantics are
     43 ; correct.
     44   %uxt32 = zext i16 %var to i32
     45   store volatile i32 %uxt32, i32* @var32
     46 ; CHECK: and {{w[0-9]+}}, {{w[0-9]+}}, #0xffff
     47 
     48   %uxt64 = zext i16 %var to i64
     49   store volatile i64 %uxt64, i64* @var64
     50 ; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xffff
     51   ret void
     52 }
     53 
     54 define void @test_extendw(i32 %var) {
     55 ; CHECK-LABEL: test_extendw:
        ; i32 -> i64 extensions: sext should select the sxtw alias; zext is
        ; currently selected as a ubfx extracting the low 32 bits (field #0, #32)
        ; rather than relying on the implicit zeroing of a w-register write.
     56 
     57   %sxt64 = sext i32 %var to i64
     58   store volatile i64 %sxt64, i64* @var64
     59 ; CHECK: sxtw {{x[0-9]+}}, {{w[0-9]+}}
     60 
     61   %uxt64 = zext i32 %var to i64
     62   store volatile i64 %uxt64, i64* @var64
     63 ; CHECK: ubfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #32
     64   ret void
     65 }
     66 
     67 define void @test_shifts(i32 %val32, i64 %val64) {
; CHECK-LABEL is the anchor; each constant shift below should select the
; corresponding immediate-form shift alias (asr/lsr/lsl) at both 32- and
; 64-bit width, including the boundary amounts #31 (i32) and #63 (i64).
     68 ; CHECK-LABEL: test_shifts:
     69 
     70   %shift1 = ashr i32 %val32, 31
     71   store volatile i32 %shift1, i32* @var32
     72 ; CHECK: asr {{w[0-9]+}}, {{w[0-9]+}}, #31
     73 
     74   %shift2 = lshr i32 %val32, 8
     75   store volatile i32 %shift2, i32* @var32
     76 ; CHECK: lsr {{w[0-9]+}}, {{w[0-9]+}}, #8
     77 
     78   %shift3 = shl i32 %val32, 1
     79   store volatile i32 %shift3, i32* @var32
     80 ; CHECK: lsl {{w[0-9]+}}, {{w[0-9]+}}, #1
     81 
     82   %shift4 = ashr i64 %val64, 31
     83   store volatile i64 %shift4, i64* @var64
     84 ; CHECK: asr {{x[0-9]+}}, {{x[0-9]+}}, #31
     85 
     86   %shift5 = lshr i64 %val64, 8
     87   store volatile i64 %shift5, i64* @var64
     88 ; CHECK: lsr {{x[0-9]+}}, {{x[0-9]+}}, #8
     89 
     90   %shift6 = shl i64 %val64, 63
     91   store volatile i64 %shift6, i64* @var64
     92 ; CHECK: lsl {{x[0-9]+}}, {{x[0-9]+}}, #63
     93 
     94   %shift7 = ashr i64 %val64, 63
     95   store volatile i64 %shift7, i64* @var64
     96 ; CHECK: asr {{x[0-9]+}}, {{x[0-9]+}}, #63
     97 
     98   %shift8 = lshr i64 %val64, 63
     99   store volatile i64 %shift8, i64* @var64
    100 ; CHECK: lsr {{x[0-9]+}}, {{x[0-9]+}}, #63
    101 
    102   %shift9 = lshr i32 %val32, 31
    103   store volatile i32 %shift9, i32* @var32
    104 ; CHECK: lsr {{w[0-9]+}}, {{w[0-9]+}}, #31
    105 
    106   %shift10 = shl i32 %val32, 31
    107   store volatile i32 %shift10, i32* @var32
    108 ; CHECK: lsl {{w[0-9]+}}, {{w[0-9]+}}, #31
    109 
    110   ret void
    111 }
    112 
    113 ; LLVM can produce in-register extensions taking place entirely with
    114 ; 64-bit registers too.
    115 define void @test_sext_inreg_64(i64 %in) {
    116 ; CHECK-LABEL: test_sext_inreg_64:
        ; trunc-then-sext of a 64-bit value should stay entirely in x-registers:
        ; sbfx for the odd i1 width, and the sxtb/sxth/sxtw aliases for the
        ; byte/half/word widths (each with a 64-bit destination).
    117 
    118 ; i1 doesn't have an official alias, but crops up and is handled by
    119 ; the bitfield ops.
    120   %trunc_i1 = trunc i64 %in to i1
    121   %sext_i1 = sext i1 %trunc_i1 to i64
    122   store volatile i64 %sext_i1, i64* @var64
    123 ; CHECK: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #1
    124 
    125   %trunc_i8 = trunc i64 %in to i8
    126   %sext_i8 = sext i8 %trunc_i8 to i64
    127   store volatile i64 %sext_i8, i64* @var64
    128 ; CHECK: sxtb {{x[0-9]+}}, {{w[0-9]+}}
    129 
    130   %trunc_i16 = trunc i64 %in to i16
    131   %sext_i16 = sext i16 %trunc_i16 to i64
    132   store volatile i64 %sext_i16, i64* @var64
    133 ; CHECK: sxth {{x[0-9]+}}, {{w[0-9]+}}
    134 
    135   %trunc_i32 = trunc i64 %in to i32
    136   %sext_i32 = sext i32 %trunc_i32 to i64
    137   store volatile i64 %sext_i32, i64* @var64
    138 ; CHECK: sxtw {{x[0-9]+}}, {{w[0-9]+}}
    139   ret void
    140 }
    141 
    142 ; These instructions don't actually select to official bitfield
    143 ; operations, but it's important that we select them somehow:
    144 define void @test_zext_inreg_64(i64 %in) {
    145 ; CHECK-LABEL: test_zext_inreg_64:
        ; trunc-then-zext of a 64-bit value: each width should select a single
        ; 64-bit AND with the matching low-bit mask (0xff/0xffff/0xffffffff).
    146 
    147   %trunc_i8 = trunc i64 %in to i8
    148   %zext_i8 = zext i8 %trunc_i8 to i64
    149   store volatile i64 %zext_i8, i64* @var64
    150 ; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xff
    151 
    152   %trunc_i16 = trunc i64 %in to i16
    153   %zext_i16 = zext i16 %trunc_i16 to i64
    154   store volatile i64 %zext_i16, i64* @var64
    155 ; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xffff
    156 
    157   %trunc_i32 = trunc i64 %in to i32
    158   %zext_i32 = zext i32 %trunc_i32 to i64
    159   store volatile i64 %zext_i32, i64* @var64
    160 ; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xffffffff
    161 
    162   ret void
    163 }
    164 
    165 define i64 @test_sext_inreg_from_32(i32 %in) {
    166 ; CHECK-LABEL: test_sext_inreg_from_32:
        ; i1 taken from a 32-bit argument but sign-extended to a 64-bit result;
        ; the sbfx must operate on the full x-register (see comment below).
    167 
    168   %small = trunc i32 %in to i1
    169   %ext = sext i1 %small to i64
    170 
    171   ; Different registers are of course, possible, though suboptimal. This is
    172   ; making sure that a 64-bit "(sext_inreg (anyext GPR32), i1)" uses the 64-bit
    173   ; sbfx rather than just 32-bits.
    174 ; CHECK: sbfx x0, x0, #0, #1
    175   ret i64 %ext
    176 }
    177 
    178 
    179 define i32 @test_ubfx32(i32* %addr) {
    180 ; CHECK-LABEL: test_ubfx32:
        ; lshr by 23 then AND with 7 reads an unsigned 3-bit field whose lsb is
        ; bit 23, so the pair should fold to a single ubfx #23, #3.
    181 ; CHECK: ubfx {{w[0-9]+}}, {{w[0-9]+}}, #23, #3
    182 
    183    %fields = load i32, i32* %addr
    184    %shifted = lshr i32 %fields, 23
    185    %masked = and i32 %shifted, 7
    186    ret i32 %masked
    187 }
    188 
    189 define i64 @test_ubfx64(i64* %addr) {
    190 ; CHECK-LABEL: test_ubfx64:
        ; 64-bit variant: lshr by 25 then AND with 1023 (a 10-bit mask) reads the
        ; unsigned field at bits [25,34], folding to a single ubfx #25, #10.
    191 ; CHECK: ubfx {{x[0-9]+}}, {{x[0-9]+}}, #25, #10
    192    %fields = load i64, i64* %addr
    193    %shifted = lshr i64 %fields, 25
    194    %masked = and i64 %shifted, 1023
    195    ret i64 %masked
    196 }
    197 
    198 define i32 @test_sbfx32(i32* %addr) {
    199 ; CHECK-LABEL: test_sbfx32:
        ; shl by 23 then ashr by 29 sign-extracts the 3-bit field (32-29) whose
        ; lsb is bit 6 (29-23), so the pair should fold to a single sbfx #6, #3.
    200 ; CHECK: sbfx {{w[0-9]+}}, {{w[0-9]+}}, #6, #3
    201 
    202    %fields = load i32, i32* %addr
    203    %shifted = shl i32 %fields, 23
    204    %extended = ashr i32 %shifted, 29
    205    ret i32 %extended
    206 }
    207 
    208 define i64 @test_sbfx64(i64* %addr) {
    209 ; CHECK-LABEL: test_sbfx64:
        ; shl by 1 then ashr by 1 sign-extends from bit 62, i.e. extracts the
        ; signed 63-bit field at the bottom of the register: sbfx #0, #63.
    210 ; CHECK: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #63
    211 
    212    %fields = load i64, i64* %addr
    213    %shifted = shl i64 %fields, 1
    214    %extended = ashr i64 %shifted, 1
    215    ret i64 %extended
    216 }
    217