; RUN: opt < %s -basicaa -slp-vectorizer -dce -S -mtriple=x86_64-apple-macosx10.8.0 -mcpu=corei7-avx | FileCheck %s

target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
target triple = "x86_64-apple-macosx10.8.0"

; Simple depth-3 chain (two parallel lanes of load, fmul, store); the lanes
; should merge into <2 x double> operations.
; CHECK-LABEL: @test1(
; CHECK: store <2 x double>
; CHECK: ret
define void @test1(double* %a, double* %b, double* %c) {
entry:
  %i0 = load double, double* %a, align 8
  %i1 = load double, double* %b, align 8
  %mul = fmul double %i0, %i1
  %arrayidx3 = getelementptr inbounds double, double* %a, i64 1
  %i3 = load double, double* %arrayidx3, align 8
  %arrayidx4 = getelementptr inbounds double, double* %b, i64 1
  %i4 = load double, double* %arrayidx4, align 8
  %mul5 = fmul double %i3, %i4
  store double %mul, double* %c, align 8
  %arrayidx5 = getelementptr inbounds double, double* %c, i64 1
  store double %mul5, double* %arrayidx5, align 8
  ret void
}

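; For reference, a minimal sketch (not part of the checked output; value names
; are illustrative) of the shape the SLP vectorizer is expected to produce for
; @test1, with the paired scalars collapsed into <2 x double> operations:
;   %pa = bitcast double* %a to <2 x double>*
;   %va = load <2 x double>, <2 x double>* %pa, align 8
;   %pb = bitcast double* %b to <2 x double>*
;   %vb = load <2 x double>, <2 x double>* %pb, align 8
;   %vmul = fmul <2 x double> %va, %vb
;   %pc = bitcast double* %c to <2 x double>*
;   store <2 x double> %vmul, <2 x double>* %pc, align 8
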
; The same depth-3 chain, with the store pointers obfuscated by bitcasts
; through i8*; the vectorizer should still see through them.
; CHECK-LABEL: @test2(
; CHECK: store <2 x double>
; CHECK: ret
define void @test2(double* %a, double* %b, i8* %e) {
entry:
  %i0 = load double, double* %a, align 8
  %i1 = load double, double* %b, align 8
  %mul = fmul double %i0, %i1
  %arrayidx3 = getelementptr inbounds double, double* %a, i64 1
  %i3 = load double, double* %arrayidx3, align 8
  %arrayidx4 = getelementptr inbounds double, double* %b, i64 1
  %i4 = load double, double* %arrayidx4, align 8
  %mul5 = fmul double %i3, %i4
  %c = bitcast i8* %e to double*
  store double %mul, double* %c, align 8
  %carrayidx5 = getelementptr inbounds i8, i8* %e, i64 8
  %arrayidx5 = bitcast i8* %carrayidx5 to double*
  store double %mul5, double* %arrayidx5, align 8
  ret void
}

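; As in @test1, a rough sketch (illustrative, not checked verbatim) of the
; expected tail of @test2: the i8* bitcasts do not hide that %e points at two
; consecutive doubles, so the stores still merge:
;   %vmul = fmul <2 x double> %va, %vb
;   %pe = bitcast i8* %e to <2 x double>*
;   store <2 x double> %vmul, <2 x double>* %pe, align 8
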
; Don't vectorize volatile loads. The loads must stay scalar, but the stores,
; fed from the scalar values, can still be vectorized.
; CHECK-LABEL: @test_volatile_load(
; CHECK-NOT: load <2 x double>
; CHECK: store <2 x double>
; CHECK: ret
define void @test_volatile_load(double* %a, double* %b, double* %c) {
entry:
  %i0 = load volatile double, double* %a, align 8
  %i1 = load volatile double, double* %b, align 8
  %mul = fmul double %i0, %i1
  %arrayidx3 = getelementptr inbounds double, double* %a, i64 1
  %i3 = load double, double* %arrayidx3, align 8
  %arrayidx4 = getelementptr inbounds double, double* %b, i64 1
  %i4 = load double, double* %arrayidx4, align 8
  %mul5 = fmul double %i3, %i4
  store double %mul, double* %c, align 8
  %arrayidx5 = getelementptr inbounds double, double* %c, i64 1
  store double %mul5, double* %arrayidx5, align 8
  ret void
}

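; A rough sketch (illustrative, not checked verbatim) of the expected shape:
; the volatile loads stay scalar, so the vector store operand is assembled
; with insertelement from the scalar products:
;   %v0 = insertelement <2 x double> undef, double %mul, i32 0
;   %v1 = insertelement <2 x double> %v0, double %mul5, i32 1
;   %pc = bitcast double* %c to <2 x double>*
;   store <2 x double> %v1, <2 x double>* %pc, align 8
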
; Don't vectorize volatile stores.
; CHECK-LABEL: @test_volatile_store(
; CHECK-NOT: store <2 x double>
; CHECK: ret
define void @test_volatile_store(double* %a, double* %b, double* %c) {
entry:
  %i0 = load double, double* %a, align 8
  %i1 = load double, double* %b, align 8
  %mul = fmul double %i0, %i1
  %arrayidx3 = getelementptr inbounds double, double* %a, i64 1
  %i3 = load double, double* %arrayidx3, align 8
  %arrayidx4 = getelementptr inbounds double, double* %b, i64 1
  %i4 = load double, double* %arrayidx4, align 8
  %mul5 = fmul double %i3, %i4
  store volatile double %mul, double* %c, align 8
  %arrayidx5 = getelementptr inbounds double, double* %c, i64 1
  store volatile double %mul5, double* %arrayidx5, align 8
  ret void
}
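
; With the volatile stores blocking the seed pair, the function is expected to
; survive the pass unchanged (modulo -dce), e.g. the stores stay scalar:
;   store volatile double %mul, double* %c, align 8
;   store volatile double %mul5, double* %arrayidx5, align 8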