; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s

; Function Attrs: norecurse nounwind readnone
; _mm512_test_epi16_mask pattern: (A & B) != 0 tested per 16-bit element.
; The and+bitcast+icmp-ne sequence should fold into a single vptestmw, with
; the 32-bit k-register mask returned via kmovd.
define zeroext i32 @TEST_mm512_test_epi16_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_test_epi16_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestmw %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  ; "any bit set" test per i16 lane -> <32 x i1> mask
  %1 = icmp ne <32 x i16> %0, zeroinitializer
  %2 = bitcast <32 x i1> %1 to i32
  ret i32 %2
}


; Function Attrs: norecurse nounwind readnone
; _mm512_test_epi8_mask pattern: (A & B) != 0 tested per 8-bit element.
; Should fold into a single vptestmb; the 64-bit k-register mask is
; returned via kmovq.
define zeroext i64 @TEST_mm512_test_epi8_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_test_epi8_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestmb %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  ; "any bit set" test per i8 lane -> <64 x i1> mask
  %1 = icmp ne <64 x i8> %0, zeroinitializer
  %2 = bitcast <64 x i1> %1 to i64
  ret i64 %2
}
; Function Attrs: norecurse nounwind readnone
; Masked variant: the vptestmw result is AND'ed with the caller-supplied
; mask %__U. The scalar and is expected to stay in GPRs (andl %edi, %eax)
; rather than being done in a k-register.
define zeroext i32 @TEST_mm512_mask_test_epi16_mask(i32 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_test_epi16_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestmw %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    andl %edi, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp ne <32 x i16> %0, zeroinitializer
  ; apply the caller's i32 mask in <32 x i1> form
  %2 = bitcast i32 %__U to <32 x i1>
  %3 = and <32 x i1> %1, %2
  %4 = bitcast <32 x i1> %3 to i32
  ret i32 %4
}
; Function Attrs: norecurse nounwind readnone
; Masked variant of the i8 test: vptestmb result AND'ed with the caller's
; 64-bit mask %__U via a scalar andq.
define zeroext i64 @TEST_mm512_mask_test_epi8_mask(i64 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_test_epi8_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestmb %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    andq %rdi, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp ne <64 x i8> %0, zeroinitializer
  ; apply the caller's i64 mask in <64 x i1> form
  %2 = bitcast i64 %__U to <64 x i1>
  %3 = and <64 x i1> %1, %2
  %4 = bitcast <64 x i1> %3 to i64
  ret i64 %4
}
; Function Attrs: norecurse nounwind readnone
; _mm512_testn_epi16_mask pattern: (A & B) == 0 per 16-bit element.
; The icmp eq form should select the negated-test instruction vptestnmw.
define zeroext i32 @TEST_mm512_testn_epi16_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_testn_epi16_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestnmw %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  ; "all bits clear" test per i16 lane (eq, not ne)
  %1 = icmp eq <32 x i16> %0, zeroinitializer
  %2 = bitcast <32 x i1> %1 to i32
  ret i32 %2
}


; Function Attrs: norecurse nounwind readnone
; _mm512_testn_epi8_mask pattern: (A & B) == 0 per 8-bit element.
; The icmp eq form should select vptestnmb with a kmovq of the 64-bit mask.
define zeroext i64 @TEST_mm512_testn_epi8_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_testn_epi8_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestnmb %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  ; "all bits clear" test per i8 lane (eq, not ne)
  %1 = icmp eq <64 x i8> %0, zeroinitializer
  %2 = bitcast <64 x i1> %1 to i64
  ret i64 %2
}
; Function Attrs: norecurse nounwind readnone
; Masked negated-test variant: vptestnmw result AND'ed with the caller's
; 32-bit mask %__U via a scalar andl.
define zeroext i32 @TEST_mm512_mask_testn_epi16_mask(i32 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_testn_epi16_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestnmw %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    andl %edi, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp eq <32 x i16> %0, zeroinitializer
  ; apply the caller's i32 mask in <32 x i1> form
  %2 = bitcast i32 %__U to <32 x i1>
  %3 = and <32 x i1> %1, %2
  %4 = bitcast <32 x i1> %3 to i32
  ret i32 %4
}
; Function Attrs: norecurse nounwind readnone
; Masked negated-test variant of the i8 case: vptestnmb result AND'ed with
; the caller's 64-bit mask %__U via a scalar andq.
define zeroext i64 @TEST_mm512_mask_testn_epi8_mask(i64 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_testn_epi8_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestnmb %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    andq %rdi, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp eq <64 x i8> %0, zeroinitializer
  ; apply the caller's i64 mask in <64 x i1> form
  %2 = bitcast i64 %__U to <64 x i1>
  %3 = and <64 x i1> %1, %2
  %4 = bitcast <64 x i1> %3 to i64
  ret i64 %4
}