Diffstat (limited to 'test/CodeGen/AArch64/dp2.ll')
-rw-r--r-- test/CodeGen/AArch64/dp2.ll | 24
1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/test/CodeGen/AArch64/dp2.ll b/test/CodeGen/AArch64/dp2.ll
index 48b0701..71b3169 100644
--- a/test/CodeGen/AArch64/dp2.ll
+++ b/test/CodeGen/AArch64/dp2.ll
@@ -1,4 +1,4 @@
-; RUN: llc -verify-machineinstrs < %s -mtriple=aarch64-none-linux-gnu | FileCheck %s
+; RUN: llc -verify-machineinstrs -o - %s -mtriple=arm64 | FileCheck %s
@var32_0 = global i32 0
@var32_1 = global i32 0
@@ -13,7 +13,7 @@ define void @rorv_i64() {
%val3_tmp = shl i64 %val0_tmp, %val2_tmp
%val4_tmp = lshr i64 %val0_tmp, %val1_tmp
%val5_tmp = or i64 %val3_tmp, %val4_tmp
-; CHECK: ror {{x[0-9]+}}, {{x[0-9]+}}, {{x[0-9]+}}
+; CHECK: {{ror|rorv}} {{x[0-9]+}}, {{x[0-9]+}}, {{x[0-9]+}}
store volatile i64 %val5_tmp, i64* @var64_0
ret void
}
@@ -23,7 +23,7 @@ define void @asrv_i64() {
%val0_tmp = load i64* @var64_0
%val1_tmp = load i64* @var64_1
%val4_tmp = ashr i64 %val0_tmp, %val1_tmp
-; CHECK: asr {{x[0-9]+}}, {{x[0-9]+}}, {{x[0-9]+}}
+; CHECK: {{asr|asrv}} {{x[0-9]+}}, {{x[0-9]+}}, {{x[0-9]+}}
store volatile i64 %val4_tmp, i64* @var64_1
ret void
}
@@ -33,7 +33,7 @@ define void @lsrv_i64() {
%val0_tmp = load i64* @var64_0
%val1_tmp = load i64* @var64_1
%val4_tmp = lshr i64 %val0_tmp, %val1_tmp
-; CHECK: lsr {{x[0-9]+}}, {{x[0-9]+}}, {{x[0-9]+}}
+; CHECK: {{lsr|lsrv}} {{x[0-9]+}}, {{x[0-9]+}}, {{x[0-9]+}}
store volatile i64 %val4_tmp, i64* @var64_0
ret void
}
@@ -43,7 +43,7 @@ define void @lslv_i64() {
%val0_tmp = load i64* @var64_0
%val1_tmp = load i64* @var64_1
%val4_tmp = shl i64 %val0_tmp, %val1_tmp
-; CHECK: lsl {{x[0-9]+}}, {{x[0-9]+}}, {{x[0-9]+}}
+; CHECK: {{lsl|lslv}} {{x[0-9]+}}, {{x[0-9]+}}, {{x[0-9]+}}
store volatile i64 %val4_tmp, i64* @var64_1
ret void
}
@@ -75,7 +75,7 @@ define void @lsrv_i32() {
%val1_tmp = load i32* @var32_1
%val2_tmp = add i32 1, %val1_tmp
%val4_tmp = lshr i32 %val0_tmp, %val2_tmp
-; CHECK: lsr {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
+; CHECK: {{lsr|lsrv}} {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
store volatile i32 %val4_tmp, i32* @var32_0
ret void
}
@@ -86,7 +86,7 @@ define void @lslv_i32() {
%val1_tmp = load i32* @var32_1
%val2_tmp = add i32 1, %val1_tmp
%val4_tmp = shl i32 %val0_tmp, %val2_tmp
-; CHECK: lsl {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
+; CHECK: {{lsl|lslv}} {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
store volatile i32 %val4_tmp, i32* @var32_1
ret void
}
@@ -100,7 +100,7 @@ define void @rorv_i32() {
%val3_tmp = shl i32 %val0_tmp, %val2_tmp
%val4_tmp = lshr i32 %val0_tmp, %val1_tmp
%val5_tmp = or i32 %val3_tmp, %val4_tmp
-; CHECK: ror {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
+; CHECK: {{ror|rorv}} {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
store volatile i32 %val5_tmp, i32* @var32_0
ret void
}
@@ -111,7 +111,7 @@ define void @asrv_i32() {
%val1_tmp = load i32* @var32_1
%val2_tmp = add i32 1, %val1_tmp
%val4_tmp = ashr i32 %val0_tmp, %val2_tmp
-; CHECK: asr {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
+; CHECK: {{asr|asrv}} {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
store volatile i32 %val4_tmp, i32* @var32_1
ret void
}
@@ -143,7 +143,7 @@ define i32 @test_lsl32() {
%val = load i32* @var32_0
%ret = shl i32 1, %val
-; CHECK: lsl {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
+; CHECK: {{lsl|lslv}} {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
ret i32 %ret
}
@@ -153,7 +153,7 @@ define i32 @test_lsr32() {
%val = load i32* @var32_0
%ret = lshr i32 1, %val
-; CHECK: lsr {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
+; CHECK: {{lsr|lsrv}} {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
ret i32 %ret
}
@@ -163,7 +163,7 @@ define i32 @test_asr32(i32 %in) {
%val = load i32* @var32_0
%ret = ashr i32 %in, %val
-; CHECK: asr {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
+; CHECK: {{asr|asrv}} {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}
ret i32 %ret
}