; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+v -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK-RV32
; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK-RV64

; These tests cover the combine
;   (mul (and (lshr X, W/2-1), 2^(W/2)+1), 2^(W/2)-1)
;     -> (bitcast (vsra (bitcast X), W/2-1))
; i.e. replicating the sign bit of each half-width lane folds to an
; arithmetic shift at half the element width.

define <2 x i16> @test_v2i16(<2 x i16> %x) {
; CHECK-RV32-LABEL: test_v2i16:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 7
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_v2i16:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 7
; CHECK-RV64-NEXT:    ret
  %1 = lshr <2 x i16> %x, <i16 7, i16 7>
  %2 = and <2 x i16> %1, <i16 257, i16 257>
  %3 = mul <2 x i16> %2, <i16 255, i16 255>
  ret <2 x i16> %3
}

; Negative test: the multiplier is 256 (a plain shl), not 2^8-1, so the
; pattern must NOT fold to a vsra and keeps the naive vsrl/vand/vsll lowering.
define <vscale x 2 x i16> @test_nxv2i16(<vscale x 2 x i16> %x) {
; CHECK-RV32-LABEL: test_nxv2i16:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetvli a0, zero, e16, mf2, ta, ma
; CHECK-RV32-NEXT:    vsrl.vi v8, v8, 7
; CHECK-RV32-NEXT:    li a0, 257
; CHECK-RV32-NEXT:    vand.vx v8, v8, a0
; CHECK-RV32-NEXT:    vsll.vi v8, v8, 8
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_nxv2i16:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetvli a0, zero, e16, mf2, ta, ma
; CHECK-RV64-NEXT:    vsrl.vi v8, v8, 7
; CHECK-RV64-NEXT:    li a0, 257
; CHECK-RV64-NEXT:    vand.vx v8, v8, a0
; CHECK-RV64-NEXT:    vsll.vi v8, v8, 8
; CHECK-RV64-NEXT:    ret
  %1 = lshr <vscale x 2 x i16> %x, splat (i16 7)
  %2 = and <vscale x 2 x i16> %1, splat (i16 257)
  %3 = mul <vscale x 2 x i16> %2, splat (i16 256)
  ret <vscale x 2 x i16> %3
}

define <2 x i32> @test_v2i32(<2 x i32> %x) {
; CHECK-RV32-LABEL: test_v2i32:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 15
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_v2i32:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 15
; CHECK-RV64-NEXT:    ret
  %1 = lshr <2 x i32> %x, <i32 15, i32 15>
  %2 = and <2 x i32> %1, <i32 65537, i32 65537>
  %3 = mul <2 x i32> %2, <i32 65535, i32 65535>
  ret <2 x i32> %3
}

define <vscale x 2 x i32> @test_nxv2i32(<vscale x 2 x i32> %x) {
; CHECK-RV32-LABEL: test_nxv2i32:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetvli a0, zero, e16, m1, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 15
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_nxv2i32:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetvli a0, zero, e16, m1, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 15
; CHECK-RV64-NEXT:    ret
  %1 = lshr <vscale x 2 x i32> %x, splat (i32 15)
  %2 = and <vscale x 2 x i32> %1, splat (i32 65537)
  %3 = mul <vscale x 2 x i32> %2, splat (i32 65535)
  ret <vscale x 2 x i32> %3
}

define <2 x i64> @test_v2i64(<2 x i64> %x) {
; CHECK-RV32-LABEL: test_v2i64:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 31
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_v2i64:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 31
; CHECK-RV64-NEXT:    ret
  %1 = lshr <2 x i64> %x, <i64 31, i64 31>
  %2 = and <2 x i64> %1, <i64 4294967297, i64 4294967297>
  %3 = mul <2 x i64> %2, <i64 4294967295, i64 4294967295>
  ret <2 x i64> %3
}

define <vscale x 2 x i64> @test_nxv2i64(<vscale x 2 x i64> %x) {
; CHECK-RV32-LABEL: test_nxv2i64:
; CHECK-RV32:       # %bb.0:
; CHECK-RV32-NEXT:    vsetvli a0, zero, e32, m2, ta, ma
; CHECK-RV32-NEXT:    vsra.vi v8, v8, 31
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: test_nxv2i64:
; CHECK-RV64:       # %bb.0:
; CHECK-RV64-NEXT:    vsetvli a0, zero, e32, m2, ta, ma
; CHECK-RV64-NEXT:    vsra.vi v8, v8, 31
; CHECK-RV64-NEXT:    ret
  %1 = lshr <vscale x 2 x i64> %x, splat (i64 31)
  %2 = and <vscale x 2 x i64> %1, splat (i64 4294967297)
  %3 = mul <vscale x 2 x i64> %2, splat (i64 4294967295)
  ret <vscale x 2 x i64> %3
}