; RUN: llc < %s -mtriple=armv7-apple-ios -verify-machineinstrs | FileCheck %s -check-prefix=ARM
; RUN: llc < %s -mtriple=armv7-apple-ios -O0 | FileCheck %s -check-prefix=ARM
; RUN: llc < %s -mtriple=thumbv7-apple-ios | FileCheck %s -check-prefix=THUMBTWO
; RUN: llc < %s -mtriple=thumbv6-apple-ios | FileCheck %s -check-prefix=THUMBONE
; RUN: llc < %s -mtriple=armv4-apple-ios | FileCheck %s -check-prefix=ARMV4

; 32-bit seq_cst store: ARMv7 and Thumb2 bracket a plain str with DMB ISH
; barriers; Thumb1 has no barrier instruction and falls back to a __sync
; libcall.
define void @test1(i32* %ptr, i32 %val1) {
; ARM: test1
; ARM: dmb {{ish$}}
; ARM-NEXT: str
; ARM-NEXT: dmb {{ish$}}
; THUMBONE: test1
; THUMBONE: __sync_lock_test_and_set_4
; THUMBTWO: test1
; THUMBTWO: dmb {{ish$}}
; THUMBTWO-NEXT: str
; THUMBTWO-NEXT: dmb {{ish$}}
  store atomic i32 %val1, i32* %ptr seq_cst, align 4
  ret void
}

; 32-bit seq_cst load: a plain ldr followed by a trailing DMB ISH barrier;
; Thumb1 again falls back to a __sync libcall.
define i32 @test2(i32* %ptr) {
; ARM: test2
; ARM: ldr
; ARM-NEXT: dmb {{ish$}}
; THUMBONE: test2
; THUMBONE: __sync_val_compare_and_swap_4
; THUMBTWO: test2
; THUMBTWO: ldr
; THUMBTWO-NEXT: dmb {{ish$}}
  %val = load atomic i32* %ptr seq_cst, align 4
  ret i32 %val
}

; Unordered atomic i8 accesses need no barriers on any of these targets:
; plain ldrb/strb suffice.
define void @test3(i8* %ptr1, i8* %ptr2) {
; ARM: test3
; ARM: ldrb
; ARM: strb
; THUMBTWO: test3
; THUMBTWO: ldrb
; THUMBTWO: strb
; THUMBONE: test3
; THUMBONE: ldrb
; THUMBONE: strb
  %val = load atomic i8* %ptr1 unordered, align 1
  store atomic i8 %val, i8* %ptr2 unordered, align 1
  ret void
}

; seq_cst i8 accesses on Thumb1 are expanded to __sync libcalls.
define void @test4(i8* %ptr1, i8* %ptr2) {
; THUMBONE: test4
; THUMBONE: ___sync_val_compare_and_swap_1
; THUMBONE: ___sync_lock_test_and_set_1
  %val = load atomic i8* %ptr1 seq_cst, align 1
  store atomic i8 %val, i8* %ptr2 seq_cst, align 1
  ret void
}

; ARMv4 has no 64-bit atomic instructions, so 64-bit atomic loads and stores
; are expanded to __sync libcalls.
define i64 @test_old_load_64bit(i64* %p) {
; ARMV4: test_old_load_64bit
; ARMV4: ___sync_val_compare_and_swap_8
  %1 = load atomic i64* %p seq_cst, align 8
  ret i64 %1
}

define void @test_old_store_64bit(i64* %p, i64 %v) {
; ARMV4: test_old_store_64bit
; ARMV4: ___sync_lock_test_and_set_8
  store atomic i64 %v, i64* %p seq_cst, align 8
  ret void
}
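
; A minimal sketch of a related case, not part of the original test: a
; monotonic i32 load/store also requires no barriers on ARM, so it should
; lower to a plain ldr/str pair with no dmb. The function name and CHECK
; lines below are illustrative assumptions, not verified compiler output.
define void @test_monotonic(i32* %ptr1, i32* %ptr2) {
; ARM: test_monotonic
; ARM: ldr
; ARM: str
  %val = load atomic i32* %ptr1 monotonic, align 4
  store atomic i32 %val, i32* %ptr2 monotonic, align 4
  ret void
}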