Lines Matching full:uses
8 %tmp1 = load x86_mmx* %A ; <x86_mmx> [#uses=1]
9 %tmp3 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
12 %tmp4 = add <8 x i8> %tmp1a, %tmp3a ; <<8 x i8>> [#uses=2]
15 %tmp7 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
16 %tmp12 = tail call x86_mmx @llvm.x86.mmx.padds.b( x86_mmx %tmp4a, x86_mmx %tmp7 ) ; <x86_mmx> [#uses=2]
18 %tmp16 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
19 %tmp21 = tail call x86_mmx @llvm.x86.mmx.paddus.b( x86_mmx %tmp12, x86_mmx %tmp16 ) ; <x86_mmx> [#uses=2]
21 %tmp27 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
24 %tmp28 = sub <8 x i8> %tmp21a, %tmp27a ; <<8 x i8>> [#uses=2]
27 %tmp31 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
28 %tmp36 = tail call x86_mmx @llvm.x86.mmx.psubs.b( x86_mmx %tmp28a, x86_mmx %tmp31 ) ; <x86_mmx> [#uses=2]
30 %tmp40 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
31 %tmp45 = tail call x86_mmx @llvm.x86.mmx.psubus.b( x86_mmx %tmp36, x86_mmx %tmp40 ) ; <x86_mmx> [#uses=2]
33 %tmp51 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
36 %tmp52 = mul <8 x i8> %tmp45a, %tmp51a ; <<8 x i8>> [#uses=2]
39 %tmp57 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
41 %tmp58 = and <8 x i8> %tmp52, %tmp57a ; <<8 x i8>> [#uses=2]
44 %tmp63 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
46 %tmp64 = or <8 x i8> %tmp58, %tmp63a ; <<8 x i8>> [#uses=2]
49 %tmp69 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
52 %tmp70 = xor <8 x i8> %tmp64b, %tmp69a ; <<8 x i8>> [#uses=1]
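The %tmp1a, %tmp3a, %tmp4a-style names above are defined on lines that do not match the search term, so they are absent from this listing. A minimal sketch of the presumed glue (inferred from the naming pattern, not copied from the file): each x86_mmx load is bitcast to a plain vector before the generic IR arithmetic, and each vector result is bitcast back to x86_mmx before the next MMX intrinsic call.

  ; hedged sketch of the unmatched glue lines (names reused from the listing):
  %tmp1a = bitcast x86_mmx %tmp1 to <8 x i8>    ; make the load usable by generic ops
  %tmp3a = bitcast x86_mmx %tmp3 to <8 x i8>
  %tmp4  = add <8 x i8> %tmp1a, %tmp3a          ; matched line 12 above
  %tmp4a = bitcast <8 x i8> %tmp4 to x86_mmx    ; back to x86_mmx for padds.b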
61 %tmp1 = load x86_mmx* %A ; <x86_mmx> [#uses=1]
62 %tmp3 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
65 %tmp4 = add <2 x i32> %tmp1a, %tmp3a ; <<2 x i32>> [#uses=2]
68 %tmp9 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
70 %tmp10 = sub <2 x i32> %tmp4, %tmp9a ; <<2 x i32>> [#uses=2]
73 %tmp15 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
76 %tmp16 = mul <2 x i32> %tmp10b, %tmp15a ; <<2 x i32>> [#uses=2]
79 %tmp21 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
82 %tmp22 = and <2 x i32> %tmp16b, %tmp21a ; <<2 x i32>> [#uses=2]
85 %tmp27 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
88 %tmp28 = or <2 x i32> %tmp22b, %tmp27a ; <<2 x i32>> [#uses=2]
91 %tmp33 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
94 %tmp34 = xor <2 x i32> %tmp28b, %tmp33a ; <<2 x i32>> [#uses=1]
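Each group of matches comes from a single function. The surrounding shape is not shown by the search; the sketch below is an assumption (the function name, the trailing store, and the ret are all hypothetical), inferred from the loads of %A and %B and the [#uses=1] on the final xor.

  ; hedged sketch; @test2, %tmp34a, and the epilogue are assumptions
  define void @test2(x86_mmx* %A, x86_mmx* %B) {
  entry:
    %tmp1 = load x86_mmx* %A
    ; ... matched arithmetic from lines 61-94 above ...
    %tmp34a = bitcast <2 x i32> %tmp34 to x86_mmx
    store x86_mmx %tmp34a, x86_mmx* %A
    ret void
  }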
103 %tmp1 = load x86_mmx* %A ; <x86_mmx> [#uses=1]
104 %tmp3 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
107 %tmp4 = add <4 x i16> %tmp1a, %tmp3a ; <<4 x i16>> [#uses=2]
110 %tmp7 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
111 %tmp12 = tail call x86_mmx @llvm.x86.mmx.padds.w( x86_mmx %tmp4a, x86_mmx %tmp7 ) ; <x86_mmx> [#uses=2]
113 %tmp16 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
114 %tmp21 = tail call x86_mmx @llvm.x86.mmx.paddus.w( x86_mmx %tmp12, x86_mmx %tmp16 ) ; <x86_mmx> [#uses=2]
116 %tmp27 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
119 %tmp28 = sub <4 x i16> %tmp21a, %tmp27a ; <<4 x i16>> [#uses=2]
122 %tmp31 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
123 %tmp36 = tail call x86_mmx @llvm.x86.mmx.psubs.w( x86_mmx %tmp28a, x86_mmx %tmp31 ) ; <x86_mmx> [#uses=2]
125 %tmp40 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
126 %tmp45 = tail call x86_mmx @llvm.x86.mmx.psubus.w( x86_mmx %tmp36, x86_mmx %tmp40 ) ; <x86_mmx> [#uses=2]
128 %tmp51 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
131 %tmp52 = mul <4 x i16> %tmp45a, %tmp51a ; <<4 x i16>> [#uses=2]
134 %tmp55 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
135 %tmp60 = tail call x86_mmx @llvm.x86.mmx.pmulh.w( x86_mmx %tmp52a, x86_mmx %tmp55 ) ; <x86_mmx> [#uses=2]
137 %tmp64 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
138 %tmp69 = tail call x86_mmx @llvm.x86.mmx.pmadd.wd( x86_mmx %tmp60, x86_mmx %tmp64 ) ; <x86_mmx> [#uses=1]
139 %tmp70 = bitcast x86_mmx %tmp69 to x86_mmx ; <x86_mmx> [#uses=2]
141 %tmp75 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
144 %tmp76 = and <4 x i16> %tmp70a, %tmp75a ; <<4 x i16>> [#uses=2]
147 %tmp81 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
150 %tmp82 = or <4 x i16> %tmp76b, %tmp81a ; <<4 x i16>> [#uses=2]
153 %tmp87 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
156 %tmp88 = xor <4 x i16> %tmp82b, %tmp87a ; <<4 x i16>> [#uses=1]
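This function mixes generic vector ops with intrinsics because saturating (padds/paddus/psubs/psubus), high-half (pmulh.w), and multiply-add (pmadd.wd) semantics have no single generic IR operation. For reference, what @llvm.x86.mmx.pmadd.wd on line 138 computes, sketched in generic IR (illustration only, not from the file; %a and %b are hypothetical <4 x i16> inputs):

  ; signed 16x16 -> 32 multiply, then horizontal add of adjacent pairs
  %a32  = sext <4 x i16> %a to <4 x i32>
  %b32  = sext <4 x i16> %b to <4 x i32>
  %prod = mul <4 x i32> %a32, %b32
  %even = shufflevector <4 x i32> %prod, <4 x i32> undef, <2 x i32> <i32 0, i32 2>
  %odd  = shufflevector <4 x i32> %prod, <4 x i32> undef, <2 x i32> <i32 1, i32 3>
  %sum  = add <2 x i32> %even, %odd             ; result viewed as <2 x i32>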
167 %tmp1 = load x86_mmx* %A ; <x86_mmx> [#uses=1]
168 %tmp3 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
169 %tmp4 = tail call x86_mmx @llvm.x86.mmx.padd.b( x86_mmx %tmp1, x86_mmx %tmp3 ) ; <x86_mmx> [#uses=2]
171 %tmp7 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
172 %tmp12 = tail call x86_mmx @llvm.x86.mmx.padds.b( x86_mmx %tmp4, x86_mmx %tmp7 ) ; <x86_mmx> [#uses=2]
174 %tmp16 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
175 %tmp21 = tail call x86_mmx @llvm.x86.mmx.paddus.b( x86_mmx %tmp12, x86_mmx %tmp16 ) ; <x86_mmx> [#uses=2]
177 %tmp27 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
178 %tmp28 = tail call x86_mmx @llvm.x86.mmx.psub.b( x86_mmx %tmp21, x86_mmx %tmp27 ) ; <x86_mmx> [#uses=2]
180 %tmp31 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
181 %tmp36 = tail call x86_mmx @llvm.x86.mmx.psubs.b( x86_mmx %tmp28, x86_mmx %tmp31 ) ; <x86_mmx> [#uses=2]
183 %tmp40 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
184 %tmp45 = tail call x86_mmx @llvm.x86.mmx.psubus.b( x86_mmx %tmp36, x86_mmx %tmp40 ) ; <x86_mmx> [#uses=2]
186 %tmp51 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
36 %tmp52 = mul <8 x i8> %tmp45a, %tmp51a ; <<8 x i8>> [#uses=2]
194 %tmp57 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
195 %tmp58 = tail call x86_mmx @llvm.x86.mmx.pand( x86_mmx %tmp51, x86_mmx %tmp57 ) ; <x86_mmx> [#uses=2]
197 %tmp63 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
198 %tmp64 = tail call x86_mmx @llvm.x86.mmx.por( x86_mmx %tmp58, x86_mmx %tmp63 ) ; <x86_mmx> [#uses=2]
200 %tmp69 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
201 %tmp70 = tail call x86_mmx @llvm.x86.mmx.pxor( x86_mmx %tmp64, x86_mmx %tmp69 ) ; <x86_mmx> [#uses=2]
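This function otherwise sticks to the intrinsic forms, but the multiply on line 190 falls back to generic IR: there is no byte-multiply MMX intrinsic (no pmull.b), so the code presumably bitcasts out of x86_mmx, multiplies as <8 x i8>, and bitcasts back. A hedged reconstruction of the unmatched lines around it (the definitions below are assumptions, not search output):

  ; assumed definitions of %tmp51aa and %tmp51b (not shown by the search):
  %tmp51aa = bitcast x86_mmx %tmp51 to <8 x i8>
  %tmp51b  = bitcast x86_mmx %tmp45 to <8 x i8>
  %tmp52   = mul <8 x i8> %tmp51b, %tmp51aa     ; matched line 190 above
  %tmp52a  = bitcast <8 x i8> %tmp52 to x86_mmx ; back to x86_mmx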
209 %tmp1 = load x86_mmx* %A ; <x86_mmx> [#uses=1]
210 %tmp3 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
211 %tmp4 = tail call x86_mmx @llvm.x86.mmx.padd.d( x86_mmx %tmp1, x86_mmx %tmp3 ) ; <x86_mmx> [#uses=2]
213 %tmp9 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
214 %tmp10 = tail call x86_mmx @llvm.x86.mmx.psub.d( x86_mmx %tmp4, x86_mmx %tmp9 ) ; <x86_mmx> [#uses=2]
216 %tmp15 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
219 %tmp16 = mul <2 x i32> %tmp10a, %tmp15a ; <<2 x i32>> [#uses=2]
222 %tmp21 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
223 %tmp22 = tail call x86_mmx @llvm.x86.mmx.pand( x86_mmx %tmp16a, x86_mmx %tmp21 ) ; <x86_mmx> [#uses=2]
225 %tmp27 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
226 %tmp28 = tail call x86_mmx @llvm.x86.mmx.por( x86_mmx %tmp22, x86_mmx %tmp27 ) ; <x86_mmx> [#uses=2]
228 %tmp33 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
229 %tmp34 = tail call x86_mmx @llvm.x86.mmx.pxor( x86_mmx %tmp28, x86_mmx %tmp33 ) ; <x86_mmx> [#uses=2]
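Unlike the saturating ops, pand, por, and pxor do have generic IR equivalents; the intrinsics appear here because the x86_mmx type is opaque and supports no generic operations directly, so using plain and/or/xor would require a bitcast round-trip. The equivalent without the intrinsic (a sketch, assuming a round-trip through i64; %a and %b are hypothetical):

  %a64   = bitcast x86_mmx %a to i64
  %b64   = bitcast x86_mmx %b to i64
  %and64 = and i64 %a64, %b64
  %res   = bitcast i64 %and64 to x86_mmx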
237 %tmp1 = load x86_mmx* %A ; <x86_mmx> [#uses=1]
238 %tmp3 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
239 %tmp4 = tail call x86_mmx @llvm.x86.mmx.padd.w( x86_mmx %tmp1, x86_mmx %tmp3 ) ; <x86_mmx> [#uses=2]
241 %tmp7 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
242 %tmp12 = tail call x86_mmx @llvm.x86.mmx.padds.w( x86_mmx %tmp4, x86_mmx %tmp7 ) ; <x86_mmx> [#uses=2]
244 %tmp16 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
245 %tmp21 = tail call x86_mmx @llvm.x86.mmx.paddus.w( x86_mmx %tmp12, x86_mmx %tmp16 ) ; <x86_mmx> [#uses=2]
247 %tmp27 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
248 %tmp28 = tail call x86_mmx @llvm.x86.mmx.psub.w( x86_mmx %tmp21, x86_mmx %tmp27 ) ; <x86_mmx> [#uses=2]
250 %tmp31 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
251 %tmp36 = tail call x86_mmx @llvm.x86.mmx.psubs.w( x86_mmx %tmp28, x86_mmx %tmp31 ) ; <x86_mmx> [#uses=2]
253 %tmp40 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
254 %tmp45 = tail call x86_mmx @llvm.x86.mmx.psubus.w( x86_mmx %tmp36, x86_mmx %tmp40 ) ; <x86_mmx> [#uses=2]
256 %tmp51 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
257 %tmp52 = tail call x86_mmx @llvm.x86.mmx.pmull.w( x86_mmx %tmp45, x86_mmx %tmp51 ) ; <x86_mmx> [#uses=2]
259 %tmp55 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
260 %tmp60 = tail call x86_mmx @llvm.x86.mmx.pmulh.w( x86_mmx %tmp52, x86_mmx %tmp55 ) ; <x86_mmx> [#uses=2]
262 %tmp64 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
263 %tmp69 = tail call x86_mmx @llvm.x86.mmx.pmadd.wd( x86_mmx %tmp60, x86_mmx %tmp64 ) ; <x86_mmx> [#uses=1]
264 %tmp70 = bitcast x86_mmx %tmp69 to x86_mmx ; <x86_mmx> [#uses=2]
266 %tmp75 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
267 %tmp76 = tail call x86_mmx @llvm.x86.mmx.pand( x86_mmx %tmp70, x86_mmx %tmp75 ) ; <x86_mmx> [#uses=2]
269 %tmp81 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
270 %tmp82 = tail call x86_mmx @llvm.x86.mmx.por( x86_mmx %tmp76, x86_mmx %tmp81 ) ; <x86_mmx> [#uses=2]
272 %tmp87 = load x86_mmx* %B ; <x86_mmx> [#uses=1]
273 %tmp88 = tail call x86_mmx @llvm.x86.mmx.pxor( x86_mmx %tmp82, x86_mmx %tmp87 ) ; <x86_mmx> [#uses=2]
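The declarations these calls require follow directly from the call sites (each intrinsic takes two x86_mmx operands and returns x86_mmx); the file presumably ends with a block along these lines:

  declare x86_mmx @llvm.x86.mmx.padd.b(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.padd.w(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.padd.d(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.padds.b(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.padds.w(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.paddus.b(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.paddus.w(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.psub.b(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.psub.w(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.psub.d(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.psubs.b(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.psubs.w(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.psubus.b(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.psubus.w(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.pmull.w(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.pmulh.w(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.pmadd.wd(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.pand(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.por(x86_mmx, x86_mmx)
  declare x86_mmx @llvm.x86.mmx.pxor(x86_mmx, x86_mmx)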