/external/llvm/test/CodeGen/X86/

sse42_64.ll
    3  declare i64 @llvm.x86.sse42.crc32.64.8(i64, i8) nounwind
    4  declare i64 @llvm.x86.sse42.crc32.64.64(i64, i64) nounwind
    6  define i64 @crc32_64_8(i64 %a, i8 %b) nounwind {
    7    %tmp = call i64 @llvm.x86.sse42.crc32.64.8(i64 %a, i8 %b)
    8    ret i64 %tmp
   [all...]

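For reference, a self-contained use of the second intrinsic declared above could look like the sketch below; this is reconstructed from the visible declaration, not the verbatim remainder of sse42_64.ll, and the function name is invented.

    declare i64 @llvm.x86.sse42.crc32.64.64(i64, i64) nounwind

    define i64 @crc32_64_64(i64 %crc, i64 %data) nounwind {
      ; accumulate the next 8 bytes (%data) into the running CRC value (%crc)
      %res = call i64 @llvm.x86.sse42.crc32.64.64(i64 %crc, i64 %data)
      ret i64 %res
    }
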
2009-06-02-RewriterBug.ll
    4  define void @sha256_block1(i32* nocapture %arr, i8* nocapture %in, i64 %num) nounwind {
   12    %indvar2787 = phi i64 [ 0, %bb.nph ], [ %indvar.next2788, %for.end ] ; <i64> [#uses=2]
   13    %tmp2791 = mul i64 %indvar2787, 44 ; <i64> [#uses=0]
   14    %ctg22996 = getelementptr i8* %in, i64 0 ; <i8*> [#uses=1]
   15    %conv = zext i32 undef to i64 ; <i64> [#uses=1]
   16    %conv11 = zext i32 undef to i64 ; <i64> [#uses=1]
   [all...]

2009-04-16-SpillerUnfold.ll
    5  %struct.SHA512_CTX = type { [8 x i64], i64, i64, %struct.anon, i32, i32 }
    6  %struct.anon = type { [16 x i64] }
    7  @K512 = external constant [80 x i64], align 32 ; <[80 x i64]*> [#uses=2]
    9  define fastcc void @sha512_block_data_order(%struct.SHA512_CTX* nocapture %ctx, i8* nocapture %in, i64 %num) nounwind ssp {
   14    %e.0489 = phi i64 [ 0, %entry ], [ %e.0, %bb349 ] ; <i64> [#uses=3]
   15    %b.0472 = phi i64 [ 0, %entry ], [ %87, %bb349 ] ; <i64> [#uses=2]
   [all...]

2007-10-31-extractelement-i64.ll
    3  target datalayout = "e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-f32:32:32-f64:32:64-v64:64:64-v128:128:128-a0:0:64-f80:128:128"
    6  define <1 x i64> @a(<2 x i64> %__A) {
    8    %__A_addr = alloca <2 x i64> ; <<2 x i64>*> [#uses=2]
    9    %retval = alloca <1 x i64>, align 8 ; <<1 x i64>*> [#uses=3]
   11    store <2 x i64> %__A, <2 x i64>* %__A_addr
   12    %tmp = load <2 x i64>* %__A_addr, align 16 ; <<2 x i64>> [#uses=1]
   [all...]

h-registers-1.ll
   10  define i64 @foo(i64 %a, i64 %b, i64 %c, i64 %d,
   11                  i64 %e, i64 %f, i64 %g, i64 %h) {
   12    %sa = lshr i64 %a, [all...]

2006-07-10-InlineAsmAConstraint.ll
    4  define i64 @test() {
    5    %tmp.i5 = call i64 asm sideeffect "rdtsc", "=A,~{dirflag},~{fpsr},~{flags}"( ) ; <i64> [#uses=1]
    6    ret i64 %tmp.i5

2007-11-04-LiveVariablesBug.ll
    4  define void @xor_sse_2(i64 %bytes, i64* %p1, i64* %p2) {
    6    %p2_addr = alloca i64* ; <i64**> [#uses=2]
    8    store i64* %p2, i64** %p2_addr, align 8
    9    %tmp1 = lshr i64 %bytes, 8 ; <i64> [#uses=1]
   10    %tmp12 = trunc i64 %tmp1 to i32 ; <i32> [#uses=2]
   [all...]

neg-shl-add.ll
    6  define i64 @foo(i64 %x, i64 %y, i64 %n) nounwind {
    7    %a = sub i64 0, %y
    8    %b = shl i64 %a, %n
    9    %c = add i64 %b, %x
   10    ret i64 %c
   12  define i64 @boo(i64 %x, i64 %y, i64 %n) nounwind {
   [all...]

/external/llvm/test/CodeGen/ARM/

long.ll
    3  define i64 @f1() {
    6    ret i64 0
    9  define i64 @f2() {
   12    ret i64 1
   15  define i64 @f3() {
   19    ret i64 2147483647
   22  define i64 @f4() {
   26    ret i64 2147483648
   29  define i64 @f5() {
   34    ret i64 922337203685477580 [all...]

2007-04-30-CombinerCrash.ll
    3  target datalayout = "e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:32-f32:32:32-f64:32:32-v64:64:64-v128:128:128-a0:0:64"
    5  %struct.CHESS_POSITION = type { i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i64, i32, i32, i8, i8, [64 x i8], i8, i8, i8, i8, i8 [all...]

formal.ll
    3  declare void @bar(i64 %x, i64 %y)
    6    call void @bar(i64 2, i64 3)

ret_i64_arg_split.ll
    3  define i64 @test_i64_arg_split(i64 %a1, i32 %a2, i64 %a3) {
    4    ret i64 %a3

/external/llvm/test/Transforms/InstCombine/

constant-fold-gep.ll
    2  target datalayout = "E-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64"
   12  ; CHECK: store i32 1, i32* getelementptr inbounds ([3 x %struct.X]* @Y, i64 0, i64 0, i32 0, i64 0), align 8
   13  store i32 1, i32* getelementptr ([3 x %struct.X]* @Y, i64 0, i64 0, i32 0, i64 0), align 4
   14  ; CHECK: store i32 1, i32* getelementptr inbounds ([3 x %struct.X]* @Y, i64 0, i64 0, i32 0, i64 1), align [all...]

/external/llvm/test/CodeGen/Mips/

mips64shift.ll
    3  define i64 @f0(i64 %a0, i64 %a1) nounwind readnone {
    6    %shl = shl i64 %a0, %a1
    7    ret i64 %shl
   10  define i64 @f1(i64 %a0, i64 %a1) nounwind readnone {
   13    %shr = ashr i64 %a0, %a1
   14    ret i64 %shr
   [all...]

/external/llvm/test/Transforms/GVN/

2011-07-07-MatchIntrinsicExtract.ll
    4  %0 = type { i64, i1 }
    6  define i64 @test1(i64 %a, i64 %b) nounwind ssp {
    8    %uadd = tail call %0 @llvm.uadd.with.overflow.i64(i64 %a, i64 %b)
   10    %add1 = add i64 %a, %b
   11    ret i64 %add1
   18  define i64 @test2(i64 %a, i64 %b) nounwind ssp {
   [all...]

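The snippet above shows the overflow intrinsic next to a plain add but cuts off before any extractvalue. A minimal, self-contained illustration of the intrinsic-plus-extractvalue pattern (an assumed sketch, not copied from the test file) is:

    declare { i64, i1 } @llvm.uadd.with.overflow.i64(i64, i64) nounwind readnone

    define i64 @uadd_value(i64 %a, i64 %b) nounwind {
      %pair = call { i64, i1 } @llvm.uadd.with.overflow.i64(i64 %a, i64 %b)
      ; element 0 holds the wrapped sum, element 1 the overflow bit
      %sum = extractvalue { i64, i1 } %pair, 0
      ret i64 %sum
    }
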
2009-11-12-MemDepMallocBitCast.ll
    5  define i64 @test() {
    6    %1 = tail call i8* @malloc(i64 mul (i64 4, i64 ptrtoint (i64* getelementptr (i64* null, i64 1) to i64))) ; <i8*> [#uses=2]
    8    %X = bitcast i8* %1 to i64* ; <i64*> [#uses=1]
   [all...]

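The malloc size argument above is built with the classic constant-expression "sizeof" idiom: index one element past a null pointer and cast the result to an integer. Isolated as a hypothetical global (the name is invented for illustration), the idiom looks like:

    ; sizeof(i64) as a constant expression: gep one i64 past null, then ptrtoint
    @sizeof_i64 = constant i64 ptrtoint (i64* getelementptr (i64* null, i64 1) to i64)
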
/external/llvm/test/CodeGen/Thumb2/

thumb2-sbc.ll
    3  define i64 @f1(i64 %a, i64 %b) {
    6    %tmp = sub i64 %a, %b
    7    ret i64 %tmp
   11  define i64 @f2(i64 %a) {
   15    %tmp = sub i64 %a, 734439407618
   16    ret i64 %tmp
   20  define i64 @f3(i64 %a) {
   [all...]

thumb2-adc.ll
    4  define i64 @f1(i64 %a) {
    7    %tmp = add i64 %a, 734439407618
    8    ret i64 %tmp
   12  define i64 @f2(i64 %a) {
   15    %tmp = add i64 %a, 5066626890203138
   16    ret i64 %tmp
   20  define i64 @f3(i64 %a) {
   [all...]

/external/llvm/test/CodeGen/Generic/

2008-02-04-Ctlz.ll
    5  define i32 @main(i64 %arg) nounwind {
    7    %tmp37 = tail call i64 @llvm.ctlz.i64( i64 %arg ) ; <i64> [#uses=1]
    8    %tmp47 = tail call i64 @llvm.cttz.i64( i64 %arg ) ; <i64> [#uses=1]
    9    %tmp57 = tail call i64 @llvm.ctpop.i64( i64 %arg ) ; <i64> [#uses=1]
   [all...]

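For these calls to parse, the module needs matching intrinsic declarations. They would look roughly like the following, using the single-operand form seen in the snippet (newer LLVM releases add an i1 is-zero-undef operand to ctlz/cttz):

    declare i64 @llvm.ctlz.i64(i64) nounwind readnone
    declare i64 @llvm.cttz.i64(i64) nounwind readnone
    declare i64 @llvm.ctpop.i64(i64) nounwind readnone
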
/external/llvm/test/CodeGen/Alpha/

cmov.ll
    4  define i64 @cmov_lt(i64 %a, i64 %c) {
    6    %tmp.1 = icmp slt i64 %c, 0 ; <i1> [#uses=1]
    7    %retval = select i1 %tmp.1, i64 %a, i64 10 ; <i64> [#uses=1]
    8    ret i64 %retval
   11  define i64 @cmov_const(i64 %a, i64 %b, i64 %c) {
   [all...]

cmpbge.ll
    3  define i1 @test1(i64 %A, i64 %B) {
    4    %C = and i64 %A, 255 ; <i64> [#uses=1]
    5    %D = and i64 %B, 255 ; <i64> [#uses=1]
    6    %E = icmp uge i64 %C, %D ; <i1> [#uses=1]
   10  define i1 @test2(i64 %a, i64 %B) {
   11    %A = shl i64 %a, 1 ; <i64> [#uses=1]
   [all...]

mul5.ll
    4  define i64 @foo1(i64 %x) {
    6    %tmp.1 = mul i64 %x, 9 ; <i64> [#uses=1]
    7    ret i64 %tmp.1
   10  define i64 @foo3(i64 %x) {
   12    %tmp.1 = mul i64 %x, 259 ; <i64> [#uses=1]
   13    ret i64 %tmp.1
   [all...]

/external/llvm/test/CodeGen/PowerPC/

subc.ll
    9  define i64 @sub_ll(i64 %a, i64 %b) {
   11    %tmp.2 = sub i64 %a, %b ; <i64> [#uses=1]
   12    ret i64 %tmp.2
   15  define i64 @sub_l_5(i64 %a) {
   17    %tmp.1 = sub i64 5, %a ; <i64> [#uses=1]
   [all...]

/external/llvm/test/CodeGen/SystemZ/

02-RetNeg.ll
    3  define i64 @foo(i64 %a) {
    5    %c = sub i64 0, %a
    6    ret i64 %c

/external/llvm/test/CodeGen/XCore/

2008-11-17-Shl64.ll
    3  define i64 @test(i64 %a) {
    4    %result = shl i64 %a, 1
    5    ret i64 %result