diff options
Diffstat (limited to 'test/CodeGen/AArch64/intrinsics-memory-barrier.ll')
-rw-r--r-- | test/CodeGen/AArch64/intrinsics-memory-barrier.ll | 57 |
1 file changed, 57 insertions, 0 deletions
diff --git a/test/CodeGen/AArch64/intrinsics-memory-barrier.ll b/test/CodeGen/AArch64/intrinsics-memory-barrier.ll
new file mode 100644
index 0000000..09e34ae
--- /dev/null
+++ b/test/CodeGen/AArch64/intrinsics-memory-barrier.ll
@@ -0,0 +1,57 @@
+; RUN: llc < %s -mtriple=aarch64-eabi -O=3 | FileCheck %s
+
+define void @test() {
+  ; CHECK: dmb sy
+  call void @llvm.aarch64.dmb(i32 15)
+  ; CHECK: dmb osh
+  call void @llvm.aarch64.dmb(i32 3)
+  ; CHECK: dsb sy
+  call void @llvm.aarch64.dsb(i32 15)
+  ; CHECK: dsb ishld
+  call void @llvm.aarch64.dsb(i32 9)
+  ; CHECK: isb
+  call void @llvm.aarch64.isb(i32 15)
+  ret void
+}
+
+; Important point is that the compiler should not reorder memory access
+; instructions around DMB.
+; Failure to do so, two STRs will collapse into one STP.
+define void @test_dmb_reordering(i32 %a, i32 %b, i32* %d) {
+  store i32 %a, i32* %d              ; CHECK: str {{w[0-9]+}}, [{{x[0-9]+}}]
+
+  call void @llvm.aarch64.dmb(i32 15); CHECK: dmb sy
+
+  %d1 = getelementptr i32* %d, i64 1
+  store i32 %b, i32* %d1             ; CHECK: str {{w[0-9]+}}, [{{x[0-9]+}}, #4]
+
+  ret void
+}
+
+; Similarly for DSB.
+define void @test_dsb_reordering(i32 %a, i32 %b, i32* %d) {
+  store i32 %a, i32* %d              ; CHECK: str {{w[0-9]+}}, [{{x[0-9]+}}]
+
+  call void @llvm.aarch64.dsb(i32 15); CHECK: dsb sy
+
+  %d1 = getelementptr i32* %d, i64 1
+  store i32 %b, i32* %d1             ; CHECK: str {{w[0-9]+}}, [{{x[0-9]+}}, #4]
+
+  ret void
+}
+
+; And ISB.
+define void @test_isb_reordering(i32 %a, i32 %b, i32* %d) {
+  store i32 %a, i32* %d              ; CHECK: str {{w[0-9]+}}, [{{x[0-9]+}}]
+
+  call void @llvm.aarch64.isb(i32 15); CHECK: isb
+
+  %d1 = getelementptr i32* %d, i64 1
+  store i32 %b, i32* %d1             ; CHECK: str {{w[0-9]+}}, [{{x[0-9]+}}, #4]
+
+  ret void
+}
+
+declare void @llvm.aarch64.dmb(i32)
+declare void @llvm.aarch64.dsb(i32)
+declare void @llvm.aarch64.isb(i32)