    Searched full:sext (Results 76 - 100 of 729)

  /external/llvm/test/Transforms/IndVarSimplify/
2009-04-14-shorten_iv_vars.ll 1 ; RUN: opt < %s -indvars -S | not grep "sext"
20 %2 = sext i32 %i.0.reg2mem.0 to i64 ; <i64> [#uses=1]
24 %6 = sext i32 %i.0.reg2mem.0 to i64 ; <i64> [#uses=1]
28 %10 = sext i32 %i.0.reg2mem.0 to i64 ; <i64> [#uses=1]
35 %16 = sext i32 %15 to i64 ; <i64> [#uses=1]
40 %21 = sext i32 %20 to i64 ; <i64> [#uses=1]
44 %25 = sext i32 %13 to i64 ; <i64> [#uses=1]
51 %31 = sext i32 %30 to i64 ; <i64> [#uses=1]
56 %36 = sext i32 %35 to i64 ; <i64> [#uses=1]
60 %40 = sext i32 %28 to i64 ; <i64> [#uses=1]
    [all...]
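
The RUN line above expects -indvars to leave no sext behind: IndVarSimplify rewrites the loop so the 32-bit counter no longer has to be sign-extended to i64 at every use. A minimal sketch of that shape (hypothetical function and array names, written in the same pre-3.7 IR dialect as these tests, not lines from the test itself):

; Each array access sign-extends the i32 counter to build a 64-bit
; index; once the counter is carried as i64, the sexts fold away.
@a = global [100 x i32] zeroinitializer

define void @shorten_iv() {
entry:
  br label %loop

loop:
  %i = phi i32 [ 0, %entry ], [ %i.next, %loop ]
  %idx = sext i32 %i to i64
  %p = getelementptr [100 x i32]* @a, i64 0, i64 %idx
  store i32 0, i32* %p
  %i.next = add nsw i32 %i, 1
  %done = icmp eq i32 %i.next, 100
  br i1 %done, label %exit, label %loop

exit:
  ret void
}
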
  /external/llvm/test/CodeGen/ARM/
vicmp.ll 16 %tmp4 = sext <8 x i1> %tmp3 to <8 x i8>
27 %tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
38 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
49 %tmp4 = sext <16 x i1> %tmp3 to <16 x i8>
60 %tmp4 = sext <8 x i1> %tmp3 to <8 x i16>
71 %tmp4 = sext <4 x i1> %tmp3 to <4 x i32>
81 %tmp4 = sext <16 x i1> %tmp3 to <16 x i8>
91 %tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
101 %tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
111 %tmp4 = sext <4 x i1> %tmp3 to <4 x i32>
    [all...]
vcge.ll 9 %tmp4 = sext <8 x i1> %tmp3 to <8 x i8>
19 %tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
29 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
39 %tmp4 = sext <8 x i1> %tmp3 to <8 x i8>
49 %tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
59 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
69 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
79 %tmp4 = sext <16 x i1> %tmp3 to <16 x i8>
89 %tmp4 = sext <8 x i1> %tmp3 to <8 x i16>
99 %tmp4 = sext <4 x i1> %tmp3 to <4 x i32>
    [all...]
fast-isel-crash2.ll 8 %r = sext i4 %t to i32
undef-sext.ll 10 %0 = sext i16 undef to i32
vceq.ll 9 %tmp4 = sext <8 x i1> %tmp3 to <8 x i8>
19 %tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
29 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
39 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
49 %tmp4 = sext <16 x i1> %tmp3 to <16 x i8>
59 %tmp4 = sext <8 x i1> %tmp3 to <8 x i16>
69 %tmp4 = sext <4 x i1> %tmp3 to <4 x i32>
79 %tmp4 = sext <4 x i1> %tmp3 to <4 x i32>
90 %tmp4 = sext <8 x i1> %tmp3 to <8 x i8>
vcgt.ll 10 %tmp4 = sext <8 x i1> %tmp3 to <8 x i8>
20 %tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
30 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
40 %tmp4 = sext <8 x i1> %tmp3 to <8 x i8>
50 %tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
60 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
70 %tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
80 %tmp4 = sext <16 x i1> %tmp3 to <16 x i8>
90 %tmp4 = sext <8 x i1> %tmp3 to <8 x i16>
100 %tmp4 = sext <4 x i1> %tmp3 to <4 x i32>
    [all...]
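
The vicmp/vcge/vceq/vcgt tests above all repeat one idiom: a vector compare yields an <N x i1>, and the sext back to the element width materializes the all-ones/all-zero lane mask that the corresponding NEON compare instruction writes. A minimal sketch (hypothetical function name):

; icmp produces one i1 per lane; sext widens each lane to 0xff or 0x00,
; matching what vcgt.s8 leaves in its result register.
define <8 x i8> @sgt_mask(<8 x i8> %a, <8 x i8> %b) {
  %cmp  = icmp sgt <8 x i8> %a, %b
  %mask = sext <8 x i1> %cmp to <8 x i8>
  ret <8 x i8> %mask
}
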
vcvt-cost.ll 1 ; We currently estimate the cost of sext/zext/trunc v8(v16)i32 <-> v8(v16)i8
14 ; COST: cost of 3 {{.*}} sext
15 %r = sext %T0_5 %v0 to %T1_5
61 ; COST: cost of 6 {{.*}} sext
62 %r = sext %TT0_5 %v0 to %TT1_5
108 ; COST: cost of 3 {{.*}} sext
109 %r = sext <4 x i16> %v0 to <4 x i64>
134 ; COST: cost of 6 {{.*}} sext
135 %r = sext <8 x i16> %v0 to <8 x i64>
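
The COST lines above check the ARM cost model's estimates for widening vector sexts; %T0_5 and %TT0_5 are type aliases defined earlier in the file and elided here. A sketch of the kind of function being costed (hypothetical name), corresponding to the "cost of 3" case:

; No single NEON instruction widens i16 lanes to i64, so the cost model
; charges several steps for this conversion (3, per the COST line above).
define <4 x i64> @widen_4xi16(<4 x i16> %v0) {
  %r = sext <4 x i16> %v0 to <4 x i64>
  ret <4 x i64> %r
}
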
  /external/llvm/test/CodeGen/SystemZ/
int-add-01.ll 12 %rhs = sext i16 %half to i32
24 %rhs = sext i16 %half to i32
36 %rhs = sext i16 %half to i32
48 %rhs = sext i16 %half to i32
62 %rhs = sext i16 %half to i32
74 %rhs = sext i16 %half to i32
86 %rhs = sext i16 %half to i32
100 %rhs = sext i16 %half to i32
114 %rhs = sext i16 %half to i32
128 %rhs = sext i16 %half to i32
    [all...]
int-mul-01.ll 12 %rhs = sext i16 %half to i32
24 %rhs = sext i16 %half to i32
36 %rhs = sext i16 %half to i32
48 %rhs = sext i16 %half to i32
62 %rhs = sext i16 %half to i32
74 %rhs = sext i16 %half to i32
86 %rhs = sext i16 %half to i32
100 %rhs = sext i16 %half to i32
114 %rhs = sext i16 %half to i32
128 %rhs = sext i16 %half to i32
    [all...]
int-sub-07.ll 12 %rhs = sext i16 %half to i32
24 %rhs = sext i16 %half to i32
36 %rhs = sext i16 %half to i32
48 %rhs = sext i16 %half to i32
62 %rhs = sext i16 %half to i32
74 %rhs = sext i16 %half to i32
86 %rhs = sext i16 %half to i32
100 %rhs = sext i16 %half to i32
114 %rhs = sext i16 %half to i32
128 %rhs = sext i16 %half to i32
    [all...]
int-cmp-05.ll 14 %i2 = sext i32 %unext to i64
25 %i2 = sext i32 %unext to i64
38 %i2 = sext i32 %unext to i64
51 %i2 = sext i32 %unext to i64
65 %i2 = sext i32 %unext to i64
77 %i2 = sext i32 %unext to i64
91 %i2 = sext i32 %unext to i64
105 %i2 = sext i32 %unext to i64
120 %i2 = sext i32 %unext to i64
137 %i2 = sext i32 %unext to i64
    [all...]
int-mul-07.ll 26 %ax = sext i32 %a to i64
27 %bx = sext i32 %b to i64
56 %ax = sext i32 %a to i64
57 %bx = sext i32 %b to i64
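
The SystemZ files above repeat one shape: the right-hand operand of a 32-bit add, multiply, or subtract is an i16 sign-extended to i32, which the elided lines presumably load from memory so the backend can select the halfword-operand forms of those instructions; int-mul-07.ll instead widens both i32 operands to i64 before multiplying. A sketch of the recurring pattern (hypothetical function name, pre-3.7 load syntax):

; A 32-bit add whose second operand is a sign-extended halfword load --
; the shape the tests above feed to the SystemZ backend.
define i32 @add_half(i32 %lhs, i16* %src) {
  %half = load i16* %src
  %rhs = sext i16 %half to i32
  %res = add i32 %lhs, %rhs
  ret i32 %res
}
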
  /external/llvm/test/CodeGen/X86/
avx-cmp.ll 7 %s = sext <8 x i1> %bincmp to <8 x i32>
15 %s = sext <4 x i1> %bincmp to <4 x i64>
52 %x = sext <8 x i1> %bincmp to <8 x i32>
63 %x = sext <4 x i1> %bincmp to <4 x i64>
74 %x = sext <16 x i1> %bincmp to <16 x i16>
85 %x = sext <32 x i1> %bincmp to <32 x i8>
96 %x = sext <8 x i1> %bincmp to <8 x i32>
107 %x = sext <4 x i1> %bincmp to <4 x i64>
118 %x = sext <16 x i1> %bincmp to <16 x i16>
129 %x = sext <32 x i1> %bincmp to <32 x i8>
    [all...]
pr15267.ll 45 %sext = sext <4 x i1> %wide.load35 to <4 x i64>
46 ret <4 x i64> %sext
vec_ext_inreg.ll 5 %c = sext <8 x i16> %b to <8 x i32>
11 %c = sext <3 x i16> %b to <3 x i32>
17 %c = sext <1 x i16> %b to <1 x i32>
2007-10-29-ExtendSetCC.ll 6 %tmp180181 = sext i16 %tmp180 to i32 ; <i32> [#uses=1]
  /external/llvm/test/Transforms/InstCombine/
2004-11-27-SetCCForCastLargerAndConstant.ll 3 ; %Y = sext i8 %X to i32
15 %Y = sext i8 %SB to i32 ; <i32> [#uses=1]
23 %Y = sext i8 %SB to i32 ; <i32> [#uses=1]
30 %Y = sext i8 %SB to i32 ; <i32> [#uses=1]
37 %Y = sext i8 %SB to i32
45 %Y = sext i8 %SB to i32 ; <i32> [#uses=1]
52 %Y = sext i8 %SB to i32 ; <i32> [#uses=1]
104 %Y = sext i8 %SB to i32 ; <i32> [#uses=1]
112 %Y = sext i8 %SB to i32 ; <i32> [#uses=1]
119 %Y = sext i8 %SB to i32 ; <i32> [#uses=1]
    [all...]
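
This InstCombine test compares a sign-extended i8 against i32 constants. The point of the fold is that sext preserves signed order: when the constant fits in i8 the compare can be shrunk to the narrow type, and when it lies outside [-128, 127] the result is a known constant. A sketch of both cases (hypothetical function names; the folded forms in the comments are the expected results, not lines from the test):

; Constant fits in i8: the compare can be performed directly on %x.
define i1 @lt_small(i8 %x) {
  %y = sext i8 %x to i32
  %c = icmp slt i32 %y, 13        ; expected fold: icmp slt i8 %x, 13
  ret i1 %c
}

; Constant is above any sign-extended i8 value: always true.
define i1 @lt_large(i8 %x) {
  %y = sext i8 %x to i32
  %c = icmp slt i32 %y, 1024      ; expected fold: ret i1 true
  ret i1 %c
}
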
  /external/llvm/test/Analysis/ScalarEvolution/
sext-iv-2.ll 3 ; CHECK: %tmp3 = sext i8 %tmp2 to i32
4 ; CHECK: --> (sext i8 {0,+,1}<%bb1> to i32) Exits: -1
6 ; CHECK: --> ((sext i8 {0,+,1}<%bb1> to i32) * {0,+,1}<%bb>) Exits: {0,+,-1}<%bb>
31 %tmp3 = sext i8 %tmp2 to i32 ; <i32> [#uses=1]
33 %tmp5 = sext i32 %i.02 to i64 ; <i64> [#uses=1]
34 %tmp6 = sext i32 %j.01 to i64 ; <i64> [#uses=1]
sext-iv-1.ll 2 ; RUN: | grep " --> (sext i. {.*,+,.*}<%bb1> to i64)" | count 5
4 ; Don't convert (sext {...,+,...}) to {sext(...),+,sext(...)} in cases
18 %2 = sext i9 %1 to i64 ; <i64> [#uses=1]
22 %6 = sext i7 %0 to i64 ; <i64> [#uses=1]
41 %2 = sext i9 %1 to i64 ; <i64> [#uses=1]
45 %6 = sext i8 %0 to i64 ; <i64> [#uses=1]
64 %2 = sext i9 %1 to i64 ; <i64> [#uses=1]
68 %6 = sext i8 %0 to i64 ; <i64> [#uses=1]
    [all...]
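
Both ScalarEvolution tests probe the sign extension of an add recurrence: sext-iv-2.ll checks that the analysis prints (sext i8 {0,+,1}<%bb1> to i32), and the comment in sext-iv-1.ll warns against rewriting sext({a,+,b}) as {sext(a),+,sext(b)} when the narrow recurrence may wrap. A sketch of the situation (hypothetical names, not taken from either test):

; The i8 counter wraps past 127, so {0,+,1} as an i64 recurrence would
; describe a different value; SCEV must keep the sext on the outside.
define i64 @keep_sext(i32 %n) {
entry:
  br label %loop

loop:
  %i = phi i32 [ 0, %entry ], [ %i.next, %loop ]
  %narrow = trunc i32 %i to i8
  %wide = sext i8 %narrow to i64      ; --> (sext i8 {0,+,1}<%loop> to i64)
  %i.next = add i32 %i, 1
  %cmp = icmp slt i32 %i.next, %n
  br i1 %cmp, label %loop, label %exit

exit:
  ret i64 %wide
}
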
fold.ll 5 %B = sext i12 %A to i16
39 %C = sext i16 %B to i30
42 %D = sext i16 %B to i32
45 %E = sext i16 %B to i34
  /external/llvm/test/CodeGen/Thumb/
ldr_ext.ll 32 %tmp1.s = sext i8 %tmp.s to i32
44 %tmp1.s = sext i16 %tmp.s to i32
55 %tmp1.s = sext i16 %tmp.s to i32
rev.ll 28 %tmp5.upgrd.2 = sext i16 %tmp5 to i32
38 %1 = sext i16 %0 to i32
53 %sext = shl i32 %or, 16
54 %conv8 = ashr exact i32 %sext, 16
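
The last two lines of rev.ll show the other common spelling of a sign extension: a shl/ashr pair by 16 sign-extends the low half of an i32 in place (the pattern backends match as SIGN_EXTEND_INREG), equivalent to truncating to i16 and sext'ing back; the exact flag simply records that the bits shifted back out are the zeros the shl introduced. A sketch of the equivalence (hypothetical function names):

; Both functions produce the low 16 bits of %x, sign-extended to i32.
define i32 @sext_via_shifts(i32 %x) {
  %hi = shl i32 %x, 16
  %r = ashr exact i32 %hi, 16     ; exact: the shl left the low 16 bits zero
  ret i32 %r
}

define i32 @sext_via_trunc(i32 %x) {
  %t = trunc i32 %x to i16
  %r = sext i16 %t to i32
  ret i32 %r
}
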
  /external/llvm/test/CodeGen/PowerPC/
shl_elim.ll 8 %tmp456 = sext i16 %tmp45 to i32 ; <i32> [#uses=1]
sign_ext_inreg1.ll 9 %tmp456 = sext i16 %tmp45 to i32 ; <i32> [#uses=1]
