; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v \
; RUN:   -verify-machineinstrs | FileCheck %s
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v \
; RUN:   -verify-machineinstrs | FileCheck %s

; vadd.vv
declare <vscale x 1 x i64> @llvm.riscv.vadd.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vadd_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vadd_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vadd.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vadd.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vadd_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vadd_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vand.vv
declare <vscale x 1 x i64> @llvm.riscv.vand.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vand_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vand_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vand.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vand.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vand.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vand.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vand_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vand_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vand.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vand.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vand.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vor.vv
declare <vscale x 1 x i64> @llvm.riscv.vor.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vor_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vor_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vor.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vor.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vor.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vor.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vor_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vor_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vor.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vor.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vor.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vxor.vv
declare <vscale x 1 x i64> @llvm.riscv.vxor.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vxor_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vxor_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vxor.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vxor.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vxor.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vxor.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vxor_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vxor_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vxor.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vxor.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vxor.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vmseq.vv
declare <vscale x 1 x i1> @llvm.riscv.vmseq.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i1> @commutable_vmseq_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vmseq_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmseq.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e8, mf8, ta, ma
; CHECK-NEXT:    vmxor.mm v0, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i1> @llvm.riscv.vmseq.nxv1i64(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i1> @llvm.riscv.vmseq.nxv1i64(<vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i1> %a, %b
  ret <vscale x 1 x i1> %ret
}

declare <vscale x 1 x i1> @llvm.riscv.vmseq.mask.nxv1i64(<vscale x 1 x i1>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen);
define <vscale x 1 x i1> @commutable_vmseq_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vmseq_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmseq.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e8, mf8, ta, ma
; CHECK-NEXT:    vmxor.mm v0, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i1> @llvm.riscv.vmseq.mask.nxv1i64(<vscale x 1 x i1> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2)
  %b = call <vscale x 1 x i1> @llvm.riscv.vmseq.mask.nxv1i64(<vscale x 1 x i1> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2)
  %ret = add <vscale x 1 x i1> %a, %b
  ret <vscale x 1 x i1> %ret
}

; vmsne.vv
declare <vscale x 1 x i1> @llvm.riscv.vmsne.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i1> @commutable_vmsne_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vmsne_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmsne.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e8, mf8, ta, ma
; CHECK-NEXT:    vmxor.mm v0, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i1> @llvm.riscv.vmsne.nxv1i64(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i1> @llvm.riscv.vmsne.nxv1i64(<vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i1> %a, %b
  ret <vscale x 1 x i1> %ret
}

declare <vscale x 1 x i1> @llvm.riscv.vmsne.mask.nxv1i64(<vscale x 1 x i1>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen);
define <vscale x 1 x i1> @commutable_vmsne_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vmsne_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmsne.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e8, mf8, ta, ma
; CHECK-NEXT:    vmxor.mm v0, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i1> @llvm.riscv.vmsne.mask.nxv1i64(<vscale x 1 x i1> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2)
  %b = call <vscale x 1 x i1> @llvm.riscv.vmsne.mask.nxv1i64(<vscale x 1 x i1> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2)
  %ret = add <vscale x 1 x i1> %a, %b
  ret <vscale x 1 x i1> %ret
}

; vmin.vv
declare <vscale x 1 x i64> @llvm.riscv.vmin.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vmin_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vmin_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmin.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vmin.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmin.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vmin.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vmin_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vmin_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmin.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vmin.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmin.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vminu.vv
declare <vscale x 1 x i64> @llvm.riscv.vminu.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vminu_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vminu_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vminu.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vminu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vminu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vminu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vminu_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vminu_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vminu.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vminu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vminu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vmax.vv
declare <vscale x 1 x i64> @llvm.riscv.vmax.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vmax_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vmax_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmax.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vmax.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmax.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vmax.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vmax_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vmax_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmax.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vmax.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmax.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vmaxu.vv
declare <vscale x 1 x i64> @llvm.riscv.vmaxu.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vmaxu_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vmaxu_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmaxu.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vmaxu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmaxu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vmaxu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vmaxu_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vmaxu_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmaxu.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vmaxu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmaxu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vmul.vv
declare <vscale x 1 x i64> @llvm.riscv.vmul.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vmul_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vmul_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmul.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vmul.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmul.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vmul.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vmul_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vmul_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmul.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vmul.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmul.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vmulh.vv
declare <vscale x 1 x i64> @llvm.riscv.vmulh.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vmulh_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vmulh_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmulh.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vmulh.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmulh.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vmulh.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vmulh_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vmulh_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmulh.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vmulh.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmulh.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vmulhu.vv
declare <vscale x 1 x i64> @llvm.riscv.vmulhu.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vmulhu_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vmulhu_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmulhu.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vmulhu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmulhu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vmulhu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vmulhu_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vmulhu_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vmulhu.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vmulhu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vmulhu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vwadd.vv
declare <vscale x 1 x i64> @llvm.riscv.vwadd.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, iXLen);
define <vscale x 1 x i64> @commutable_vwadd_vv(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vwadd_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwadd.vv v10, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vwadd.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwadd.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vwadd.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vwadd_vv_masked(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vwadd_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwadd.vv v10, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vwadd.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwadd.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vwaddu.vv
declare <vscale x 1 x i64> @llvm.riscv.vwaddu.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, iXLen);
define <vscale x 1 x i64> @commutable_vwaddu_vv(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vwaddu_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwaddu.vv v10, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vwaddu.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwaddu.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vwaddu.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vwaddu_vv_masked(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vwaddu_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwaddu.vv v10, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vwaddu.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwaddu.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vwmul.vv
declare <vscale x 1 x i64> @llvm.riscv.vwmul.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, iXLen);
define <vscale x 1 x i64> @commutable_vwmul_vv(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vwmul_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwmul.vv v10, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vwmul.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwmul.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vwmul.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vwmul_vv_masked(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vwmul_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwmul.vv v10, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vwmul.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwmul.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vwmulu.vv
declare <vscale x 1 x i64> @llvm.riscv.vwmulu.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, iXLen);
define <vscale x 1 x i64> @commutable_vwmulu_vv(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vwmulu_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwmulu.vv v10, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vwmulu.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwmulu.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vwmulu.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vwmulu_vv_masked(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vwmulu_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwmulu.vv v10, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vwmulu.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwmulu.mask.nxv1i64.nxv1i32.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vwmacc.vv
declare <vscale x 1 x i64> @llvm.riscv.vwmacc.nxv1i64.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vwmacc_vv(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vwmacc_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwmacc.vv v10, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vwmacc.nxv1i64.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwmacc.nxv1i64.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vwmacc.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vwmacc_vv_masked(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vwmacc_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwmacc.vv v10, v8, v9, v0.t
; CHECK-NEXT:    vwmacc.vv v11, v9, v8, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v11
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vwmacc.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwmacc.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vwmaccu.vv
declare <vscale x 1 x i64> @llvm.riscv.vwmaccu.nxv1i64.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vwmaccu_vv(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vwmaccu_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwmaccu.vv v10, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v10
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vwmaccu.nxv1i64.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwmaccu.nxv1i64.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vwmaccu.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>, <vscale x 1 x i32>, <vscale x 1 x i32>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vwmaccu_vv_masked(<vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vwmaccu_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e32, mf2, ta, ma
; CHECK-NEXT:    vwmaccu.vv v10, v8, v9, v0.t
; CHECK-NEXT:    vwmaccu.vv v11, v9, v8, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v10, v11
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vwmaccu.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %0, <vscale x 1 x i32> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vwmaccu.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> poison, <vscale x 1 x i32> %1, <vscale x 1 x i32> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vadc.vvm
declare <vscale x 1 x i64> @llvm.riscv.vadc.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen);
define <vscale x 1 x i64> @commutable_vadc_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vadc_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vadc.vvm v8, v8, v9, v0
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vadc.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vadc.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vsadd.vv
declare <vscale x 1 x i64> @llvm.riscv.vsadd.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vsadd_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vsadd_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vsadd.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vsadd.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vsadd.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vsadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vsadd_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vsadd_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vsadd.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vsadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vsadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vsaddu.vv
declare <vscale x 1 x i64> @llvm.riscv.vsaddu.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen);
define <vscale x 1 x i64> @commutable_vsaddu_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vsaddu_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vsaddu.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vsaddu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vsaddu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vsaddu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vsaddu_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vsaddu_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vsaddu.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vsaddu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vsaddu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vaadd.vv
declare <vscale x 1 x i64> @llvm.riscv.vaadd.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vaadd_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vaadd_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    csrwi vxrm, 0
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vaadd.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vaadd.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen 0, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vaadd.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen 0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vaadd_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vaadd_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    csrwi vxrm, 0
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vaadd.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen 0, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen 0, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vaaddu.vv
declare <vscale x 1 x i64> @llvm.riscv.vaaddu.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vaaddu_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vaaddu_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    csrwi vxrm, 0
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vaaddu.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vaaddu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen 0, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vaaddu.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen 0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vaaddu_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vaaddu_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    csrwi vxrm, 0
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vaaddu.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen 0, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen 0, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

; vsmul.vv
declare <vscale x 1 x i64> @llvm.riscv.vsmul.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vsmul_vv(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen %2) nounwind {
; CHECK-LABEL: commutable_vsmul_vv:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    csrwi vxrm, 0
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vsmul.vv v8, v8, v9
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
entry:
  %a = call <vscale x 1 x i64> @llvm.riscv.vsmul.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, iXLen 0, iXLen %2)
  %b = call <vscale x 1 x i64> @llvm.riscv.vsmul.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, iXLen 0, iXLen %2)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}

declare <vscale x 1 x i64> @llvm.riscv.vsmul.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i64>, <vscale x 1 x i1>, iXLen, iXLen, iXLen);
define <vscale x 1 x i64> @commutable_vsmul_vv_masked(<vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen %2) {
; CHECK-LABEL: commutable_vsmul_vv_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    csrwi vxrm, 0
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vsmul.vv v8, v8, v9, v0.t
; CHECK-NEXT:    vsetvli a0, zero, e64, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i64> @llvm.riscv.vsmul.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %0, <vscale x 1 x i64> %1, <vscale x 1 x i1> %mask, iXLen 0, iXLen %2, iXLen 1)
  %b = call <vscale x 1 x i64> @llvm.riscv.vsmul.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> %1, <vscale x 1 x i64> %0, <vscale x 1 x i1> %mask, iXLen 0, iXLen %2, iXLen 1)
  %ret = add <vscale x 1 x i64> %a, %b
  ret <vscale x 1 x i64> %ret
}