; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+d,+zvfh,+zvfbfmin,+v \
; RUN:   -target-abi=ilp32d -verify-machineinstrs < %s | FileCheck %s \
; RUN:   --check-prefixes=CHECK,ZVFH
; RUN: llc -mtriple=riscv64 -mattr=+d,+zvfh,+zvfbfmin,+v \
; RUN:   -target-abi=lp64d -verify-machineinstrs < %s | FileCheck %s \
; RUN:   --check-prefixes=CHECK,ZVFH
; RUN: llc -mtriple=riscv32 -mattr=+d,+zfhmin,+zvfhmin,+zvfbfmin,+v \
; RUN:   -target-abi=ilp32d -verify-machineinstrs < %s | FileCheck %s \
; RUN:   --check-prefixes=CHECK,ZVFHMIN
; RUN: llc -mtriple=riscv64 -mattr=+d,+zfhmin,+zvfhmin,+zvfbfmin,+v \
; RUN:   -target-abi=lp64d -verify-machineinstrs < %s | FileCheck %s \
; RUN:   --check-prefixes=CHECK,ZVFHMIN

; bfloat has no native vfabs; fabs is lowered to clearing the sign bit
; (and with 0x7fff) for every LMUL.

define <vscale x 1 x bfloat> @nxv1bf16(<vscale x 1 x bfloat> %v) {
; CHECK-LABEL: nxv1bf16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lui a0, 8
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vsetvli a1, zero, e16, mf4, ta, ma
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    ret
  %r = call <vscale x 1 x bfloat> @llvm.fabs.nxv1bf16(<vscale x 1 x bfloat> %v)
  ret <vscale x 1 x bfloat> %r
}

define <vscale x 2 x bfloat> @nxv2bf16(<vscale x 2 x bfloat> %v) {
; CHECK-LABEL: nxv2bf16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lui a0, 8
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vsetvli a1, zero, e16, mf2, ta, ma
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    ret
  %r = call <vscale x 2 x bfloat> @llvm.fabs.nxv2bf16(<vscale x 2 x bfloat> %v)
  ret <vscale x 2 x bfloat> %r
}

define <vscale x 4 x bfloat> @nxv4bf16(<vscale x 4 x bfloat> %v) {
; CHECK-LABEL: nxv4bf16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lui a0, 8
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vsetvli a1, zero, e16, m1, ta, ma
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    ret
  %r = call <vscale x 4 x bfloat> @llvm.fabs.nxv4bf16(<vscale x 4 x bfloat> %v)
  ret <vscale x 4 x bfloat> %r
}

define <vscale x 8 x bfloat> @nxv8bf16(<vscale x 8 x bfloat> %v) {
; CHECK-LABEL: nxv8bf16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lui a0, 8
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vsetvli a1, zero, e16, m2, ta, ma
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    ret
  %r = call <vscale x 8 x bfloat> @llvm.fabs.nxv8bf16(<vscale x 8 x bfloat> %v)
  ret <vscale x 8 x bfloat> %r
}

define <vscale x 16 x bfloat> @nxv16bf16(<vscale x 16 x bfloat> %v) {
; CHECK-LABEL: nxv16bf16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lui a0, 8
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vsetvli a1, zero, e16, m4, ta, ma
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    ret
  %r = call <vscale x 16 x bfloat> @llvm.fabs.nxv16bf16(<vscale x 16 x bfloat> %v)
  ret <vscale x 16 x bfloat> %r
}

define <vscale x 32 x bfloat> @nxv32bf16(<vscale x 32 x bfloat> %v) {
; CHECK-LABEL: nxv32bf16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lui a0, 8
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vsetvli a1, zero, e16, m8, ta, ma
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    ret
  %r = call <vscale x 32 x bfloat> @llvm.fabs.nxv32bf16(<vscale x 32 x bfloat> %v)
  ret <vscale x 32 x bfloat> %r
}

; f16: with +zvfh a native vfabs.v is emitted; with only +zvfhmin the
; lowering falls back to the same sign-bit-clearing vand.vx as bf16.

declare <vscale x 1 x half> @llvm.fabs.nxv1f16(<vscale x 1 x half>)

define <vscale x 1 x half> @vfabs_nxv1f16(<vscale x 1 x half> %v) {
; ZVFH-LABEL: vfabs_nxv1f16:
; ZVFH:       # %bb.0:
; ZVFH-NEXT:    vsetvli a0, zero, e16, mf4, ta, ma
; ZVFH-NEXT:    vfabs.v v8, v8
; ZVFH-NEXT:    ret
;
; ZVFHMIN-LABEL: vfabs_nxv1f16:
; ZVFHMIN:       # %bb.0:
; ZVFHMIN-NEXT:    lui a0, 8
; ZVFHMIN-NEXT:    addi a0, a0, -1
; ZVFHMIN-NEXT:    vsetvli a1, zero, e16, mf4, ta, ma
; ZVFHMIN-NEXT:    vand.vx v8, v8, a0
; ZVFHMIN-NEXT:    ret
  %r = call <vscale x 1 x half> @llvm.fabs.nxv1f16(<vscale x 1 x half> %v)
  ret <vscale x 1 x half> %r
}

declare <vscale x 2 x half> @llvm.fabs.nxv2f16(<vscale x 2 x half>)

define <vscale x 2 x half> @vfabs_nxv2f16(<vscale x 2 x half> %v) {
; ZVFH-LABEL: vfabs_nxv2f16:
; ZVFH:       # %bb.0:
; ZVFH-NEXT:    vsetvli a0, zero, e16, mf2, ta, ma
; ZVFH-NEXT:    vfabs.v v8, v8
; ZVFH-NEXT:    ret
;
; ZVFHMIN-LABEL: vfabs_nxv2f16:
; ZVFHMIN:       # %bb.0:
; ZVFHMIN-NEXT:    lui a0, 8
; ZVFHMIN-NEXT:    addi a0, a0, -1
; ZVFHMIN-NEXT:    vsetvli a1, zero, e16, mf2, ta, ma
; ZVFHMIN-NEXT:    vand.vx v8, v8, a0
; ZVFHMIN-NEXT:    ret
  %r = call <vscale x 2 x half> @llvm.fabs.nxv2f16(<vscale x 2 x half> %v)
  ret <vscale x 2 x half> %r
}

declare <vscale x 4 x half> @llvm.fabs.nxv4f16(<vscale x 4 x half>)

define <vscale x 4 x half> @vfabs_nxv4f16(<vscale x 4 x half> %v) {
; ZVFH-LABEL: vfabs_nxv4f16:
; ZVFH:       # %bb.0:
; ZVFH-NEXT:    vsetvli a0, zero, e16, m1, ta, ma
; ZVFH-NEXT:    vfabs.v v8, v8
; ZVFH-NEXT:    ret
;
; ZVFHMIN-LABEL: vfabs_nxv4f16:
; ZVFHMIN:       # %bb.0:
; ZVFHMIN-NEXT:    lui a0, 8
; ZVFHMIN-NEXT:    addi a0, a0, -1
; ZVFHMIN-NEXT:    vsetvli a1, zero, e16, m1, ta, ma
; ZVFHMIN-NEXT:    vand.vx v8, v8, a0
; ZVFHMIN-NEXT:    ret
  %r = call <vscale x 4 x half> @llvm.fabs.nxv4f16(<vscale x 4 x half> %v)
  ret <vscale x 4 x half> %r
}

declare <vscale x 8 x half> @llvm.fabs.nxv8f16(<vscale x 8 x half>)

define <vscale x 8 x half> @vfabs_nxv8f16(<vscale x 8 x half> %v) {
; ZVFH-LABEL: vfabs_nxv8f16:
; ZVFH:       # %bb.0:
; ZVFH-NEXT:    vsetvli a0, zero, e16, m2, ta, ma
; ZVFH-NEXT:    vfabs.v v8, v8
; ZVFH-NEXT:    ret
;
; ZVFHMIN-LABEL: vfabs_nxv8f16:
; ZVFHMIN:       # %bb.0:
; ZVFHMIN-NEXT:    lui a0, 8
; ZVFHMIN-NEXT:    addi a0, a0, -1
; ZVFHMIN-NEXT:    vsetvli a1, zero, e16, m2, ta, ma
; ZVFHMIN-NEXT:    vand.vx v8, v8, a0
; ZVFHMIN-NEXT:    ret
  %r = call <vscale x 8 x half> @llvm.fabs.nxv8f16(<vscale x 8 x half> %v)
  ret <vscale x 8 x half> %r
}

declare <vscale x 16 x half> @llvm.fabs.nxv16f16(<vscale x 16 x half>)

define <vscale x 16 x half> @vfabs_nxv16f16(<vscale x 16 x half> %v) {
; ZVFH-LABEL: vfabs_nxv16f16:
; ZVFH:       # %bb.0:
; ZVFH-NEXT:    vsetvli a0, zero, e16, m4, ta, ma
; ZVFH-NEXT:    vfabs.v v8, v8
; ZVFH-NEXT:    ret
;
; ZVFHMIN-LABEL: vfabs_nxv16f16:
; ZVFHMIN:       # %bb.0:
; ZVFHMIN-NEXT:    lui a0, 8
; ZVFHMIN-NEXT:    addi a0, a0, -1
; ZVFHMIN-NEXT:    vsetvli a1, zero, e16, m4, ta, ma
; ZVFHMIN-NEXT:    vand.vx v8, v8, a0
; ZVFHMIN-NEXT:    ret
  %r = call <vscale x 16 x half> @llvm.fabs.nxv16f16(<vscale x 16 x half> %v)
  ret <vscale x 16 x half> %r
}

declare <vscale x 32 x half> @llvm.fabs.nxv32f16(<vscale x 32 x half>)

define <vscale x 32 x half> @vfabs_nxv32f16(<vscale x 32 x half> %v) {
; ZVFH-LABEL: vfabs_nxv32f16:
; ZVFH:       # %bb.0:
; ZVFH-NEXT:    vsetvli a0, zero, e16, m8, ta, ma
; ZVFH-NEXT:    vfabs.v v8, v8
; ZVFH-NEXT:    ret
;
; ZVFHMIN-LABEL: vfabs_nxv32f16:
; ZVFHMIN:       # %bb.0:
; ZVFHMIN-NEXT:    lui a0, 8
; ZVFHMIN-NEXT:    addi a0, a0, -1
; ZVFHMIN-NEXT:    vsetvli a1, zero, e16, m8, ta, ma
; ZVFHMIN-NEXT:    vand.vx v8, v8, a0
; ZVFHMIN-NEXT:    ret
  %r = call <vscale x 32 x half> @llvm.fabs.nxv32f16(<vscale x 32 x half> %v)
  ret <vscale x 32 x half> %r
}

; f32 and f64 always have native vfabs.v under +v with +d.

declare <vscale x 1 x float> @llvm.fabs.nxv1f32(<vscale x 1 x float>)

define <vscale x 1 x float> @vfabs_nxv1f32(<vscale x 1 x float> %v) {
; CHECK-LABEL: vfabs_nxv1f32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e32, mf2, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 1 x float> @llvm.fabs.nxv1f32(<vscale x 1 x float> %v)
  ret <vscale x 1 x float> %r
}

declare <vscale x 2 x float> @llvm.fabs.nxv2f32(<vscale x 2 x float>)

define <vscale x 2 x float> @vfabs_nxv2f32(<vscale x 2 x float> %v) {
; CHECK-LABEL: vfabs_nxv2f32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e32, m1, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 2 x float> @llvm.fabs.nxv2f32(<vscale x 2 x float> %v)
  ret <vscale x 2 x float> %r
}

declare <vscale x 4 x float> @llvm.fabs.nxv4f32(<vscale x 4 x float>)

define <vscale x 4 x float> @vfabs_nxv4f32(<vscale x 4 x float> %v) {
; CHECK-LABEL: vfabs_nxv4f32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e32, m2, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 4 x float> @llvm.fabs.nxv4f32(<vscale x 4 x float> %v)
  ret <vscale x 4 x float> %r
}

declare <vscale x 8 x float> @llvm.fabs.nxv8f32(<vscale x 8 x float>)

define <vscale x 8 x float> @vfabs_nxv8f32(<vscale x 8 x float> %v) {
; CHECK-LABEL: vfabs_nxv8f32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e32, m4, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 8 x float> @llvm.fabs.nxv8f32(<vscale x 8 x float> %v)
  ret <vscale x 8 x float> %r
}

declare <vscale x 16 x float> @llvm.fabs.nxv16f32(<vscale x 16 x float>)

define <vscale x 16 x float> @vfabs_nxv16f32(<vscale x 16 x float> %v) {
; CHECK-LABEL: vfabs_nxv16f32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e32, m8, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 16 x float> @llvm.fabs.nxv16f32(<vscale x 16 x float> %v)
  ret <vscale x 16 x float> %r
}

declare <vscale x 1 x double> @llvm.fabs.nxv1f64(<vscale x 1 x double>)

define <vscale x 1 x double> @vfabs_nxv1f64(<vscale x 1 x double> %v) {
; CHECK-LABEL: vfabs_nxv1f64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 1 x double> @llvm.fabs.nxv1f64(<vscale x 1 x double> %v)
  ret <vscale x 1 x double> %r
}

declare <vscale x 2 x double> @llvm.fabs.nxv2f64(<vscale x 2 x double>)

define <vscale x 2 x double> @vfabs_nxv2f64(<vscale x 2 x double> %v) {
; CHECK-LABEL: vfabs_nxv2f64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e64, m2, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 2 x double> @llvm.fabs.nxv2f64(<vscale x 2 x double> %v)
  ret <vscale x 2 x double> %r
}

declare <vscale x 4 x double> @llvm.fabs.nxv4f64(<vscale x 4 x double>)

define <vscale x 4 x double> @vfabs_nxv4f64(<vscale x 4 x double> %v) {
; CHECK-LABEL: vfabs_nxv4f64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e64, m4, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 4 x double> @llvm.fabs.nxv4f64(<vscale x 4 x double> %v)
  ret <vscale x 4 x double> %r
}

declare <vscale x 8 x double> @llvm.fabs.nxv8f64(<vscale x 8 x double>)

define <vscale x 8 x double> @vfabs_nxv8f64(<vscale x 8 x double> %v) {
; CHECK-LABEL: vfabs_nxv8f64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e64, m8, ta, ma
; CHECK-NEXT:    vfabs.v v8, v8
; CHECK-NEXT:    ret
  %r = call <vscale x 8 x double> @llvm.fabs.nxv8f64(<vscale x 8 x double> %v)
  ret <vscale x 8 x double> %r
}