; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc %s -o - | FileCheck %s
; RUN: llc -combiner-disabled %s -o - | FileCheck %s

target triple = "aarch64-unknown-linux-gnu"

; Verify lowering code in isolation to ensure we can lower shifts that would
; normally be optimised away.

define i32 @fshl_i32_by_zero(i32 %unused, i32 %a, i32 %b) {
; CHECK-LABEL: fshl_i32_by_zero:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov w0, w1
; CHECK-NEXT:    ret
  %r = call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 0)
  ret i32 %r
}

define i32 @fshl_i32_by_half_srclen(i32 %unused, i32 %a, i32 %b) {
; CHECK-LABEL: fshl_i32_by_half_srclen:
; CHECK:       // %bb.0:
; CHECK-NEXT:    extr w0, w1, w2, #16
; CHECK-NEXT:    ret
  %r = call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 16)
  ret i32 %r
}

define i32 @fshl_i32_by_srclen(i32 %unused, i32 %a, i32 %b) {
; CHECK-LABEL: fshl_i32_by_srclen:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov w0, w1
; CHECK-NEXT:    ret
  %r = call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 32)
  ret i32 %r
}

define i32 @fshl_i32_by_srclen_plus1(i32 %unused, i32 %a, i32 %b) {
; CHECK-LABEL: fshl_i32_by_srclen_plus1:
; CHECK:       // %bb.0:
; CHECK-NEXT:    extr w0, w1, w2, #31
; CHECK-NEXT:    ret
  %r = call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 33)
  ret i32 %r
}

define i64 @fshl_i64_by_zero(i64 %unused, i64 %a, i64 %b) {
; CHECK-LABEL: fshl_i64_by_zero:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov x0, x1
; CHECK-NEXT:    ret
  %r = call i64 @llvm.fshl.i64(i64 %a, i64 %b, i64 0)
  ret i64 %r
}

define i64 @fshl_i64_by_srclen(i64 %unused, i64 %a, i64 %b) {
; CHECK-LABEL: fshl_i64_by_srclen:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov x0, x1
; CHECK-NEXT:    ret
  %r = call i64 @llvm.fshl.i64(i64 %a, i64 %b, i64 64)
  ret i64 %r
}

define i64 @fshl_i64_by_srclen_plus1(i64 %unused, i64 %a, i64 %b) {
; CHECK-LABEL: fshl_i64_by_srclen_plus1:
; CHECK:       // %bb.0:
; CHECK-NEXT:    extr x0, x1, x2, #63
; CHECK-NEXT:    ret
  %r = call i64 @llvm.fshl.i64(i64 %a, i64 %b, i64 65)
  ret i64 %r
}

define i32 @fshr_i32_by_zero(i32 %unused, i32 %a, i32 %b) {
; CHECK-LABEL: fshr_i32_by_zero:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov w0, w2
; CHECK-NEXT:    ret
  %r = call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 0)
  ret i32 %r
}

define i32 @fshr_i32_by_srclen(i32 %unused, i32 %a, i32 %b) {
; CHECK-LABEL: fshr_i32_by_srclen:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov w0, w2
; CHECK-NEXT:    ret
  %r = call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 32)
  ret i32 %r
}

define i32 @fshr_i32_by_half_srclen(i32 %unused, i32 %a, i32 %b) {
; CHECK-LABEL: fshr_i32_by_half_srclen:
; CHECK:       // %bb.0:
; CHECK-NEXT:    extr w0, w1, w2, #16
; CHECK-NEXT:    ret
  %r = call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 16)
  ret i32 %r
}

define i32 @fshr_i32_by_srclen_plus1(i32 %unused, i32 %a, i32 %b) {
; CHECK-LABEL: fshr_i32_by_srclen_plus1:
; CHECK:       // %bb.0:
; CHECK-NEXT:    extr w0, w1, w2, #1
; CHECK-NEXT:    ret
  %r = call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 33)
  ret i32 %r
}

define i64 @fshr_i64_by_zero(i64 %unused, i64 %a, i64 %b) {
; CHECK-LABEL: fshr_i64_by_zero:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov x0, x2
; CHECK-NEXT:    ret
  %r = call i64 @llvm.fshr.i64(i64 %a, i64 %b, i64 0)
  ret i64 %r
}

define i64 @fshr_i64_by_srclen(i64 %unused, i64 %a, i64 %b) {
; CHECK-LABEL: fshr_i64_by_srclen:
; CHECK:       // %bb.0:
; CHECK-NEXT:    mov x0, x2
; CHECK-NEXT:    ret
  %r = call i64 @llvm.fshr.i64(i64 %a, i64 %b, i64 64)
  ret i64 %r
}

define i64 @fshr_i64_by_srclen_plus1(i64 %unused, i64 %a, i64 %b) {
; CHECK-LABEL: fshr_i64_by_srclen_plus1:
; CHECK:       // %bb.0:
; CHECK-NEXT:    extr x0, x1, x2, #1
; CHECK-NEXT:    ret
  %r = call i64
 @llvm.fshr.i64(i64 %a, i64 %b, i64 65)
  ret i64 %r
}
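
; Explicit declarations for the funnel-shift intrinsics exercised above,
; following the signatures documented in the LangRef; they are not part of the
; original checks and are included only so the module is self-contained.
declare i32 @llvm.fshl.i32(i32, i32, i32)
declare i64 @llvm.fshl.i64(i64, i64, i64)
declare i32 @llvm.fshr.i32(i32, i32, i32)
declare i64 @llvm.fshr.i64(i64, i64, i64)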