diff options
author | Craig Topper <craig.topper@sifive.com> | 2023-01-27 15:52:31 -0800 |
---|---|---|
committer | Craig Topper <craig.topper@sifive.com> | 2023-01-27 16:26:38 -0800 |
commit | 1ce3afd97784b7e638c6a562451dc008a6bc3907 (patch) | |
tree | 286c1c9e4491959baa75ecb8106d874d87b21280 | |
parent | c5fa6b1610a3a665c22f70314ed65e7230091028 (diff) | |
download | llvm-1ce3afd97784b7e638c6a562451dc008a6bc3907.zip llvm-1ce3afd97784b7e638c6a562451dc008a6bc3907.tar.gz llvm-1ce3afd97784b7e638c6a562451dc008a6bc3907.tar.bz2 |
[ValueTracking] Teach computeKnownBits about riscv.vsetvli.opt and riscv.vsetvlimax.opt intrinsics.
These are like the corresponding intrinsics without the .opt suffix, but they don't have side effects.
Add missing test cases for riscv.vsetvlimax.
-rw-r--r-- | llvm/lib/Analysis/ValueTracking.cpp | 2 | ||||
-rw-r--r-- | llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll | 120 |
2 files changed, 122 insertions, 0 deletions
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp index 4f1b282..e0af42d 100644 --- a/llvm/lib/Analysis/ValueTracking.cpp +++ b/llvm/lib/Analysis/ValueTracking.cpp @@ -1735,7 +1735,9 @@ static void computeKnownBitsFromOperator(const Operator *I, Known.Zero.setBitsFrom(32); break; case Intrinsic::riscv_vsetvli: + case Intrinsic::riscv_vsetvli_opt: case Intrinsic::riscv_vsetvlimax: + case Intrinsic::riscv_vsetvlimax_opt: // Assume that VL output is positive and would fit in an int32_t. // TODO: VLEN might be capped at 16 bits in a future V spec update. if (BitWidth >= 32) diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll index 4a81082..3e3be9b 100644 --- a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll +++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll @@ -3,6 +3,12 @@ declare i32 @llvm.riscv.vsetvli.i32(i32, i32, i32) declare i64 @llvm.riscv.vsetvli.i64(i64, i64, i64) +declare i32 @llvm.riscv.vsetvlimax.i32(i32, i32) +declare i64 @llvm.riscv.vsetvlimax.i64(i64, i64) +declare i32 @llvm.riscv.vsetvli.opt.i32(i32, i32, i32) +declare i64 @llvm.riscv.vsetvli.opt.i64(i64, i64, i64) +declare i32 @llvm.riscv.vsetvlimax.opt.i32(i32, i32) +declare i64 @llvm.riscv.vsetvlimax.opt.i64(i64, i64) define i32 @vsetvli_i32() nounwind { ; CHECK-LABEL: @vsetvli_i32( @@ -41,3 +47,117 @@ entry: %2 = zext i32 %1 to i64 ret i64 %2 } + +define i32 @vsetvlimax_i32() nounwind { +; CHECK-LABEL: @vsetvlimax_i32( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1) +; CHECK-NEXT: ret i32 [[TMP0]] +; +entry: + %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1) + %1 = and i32 %0, 2147483647 + ret i32 %1 +} + +define i64 @vsetvlimax_sext_i64() nounwind { +; CHECK-LABEL: @vsetvlimax_sext_i64( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i64 
@llvm.riscv.vsetvlimax.i64(i64 1, i64 1) +; CHECK-NEXT: ret i64 [[TMP0]] +; +entry: + %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1) + %1 = trunc i64 %0 to i32 + %2 = sext i32 %1 to i64 + ret i64 %2 +} + +define i64 @vsetvlimax_zext_i64() nounwind { +; CHECK-LABEL: @vsetvlimax_zext_i64( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1) +; CHECK-NEXT: ret i64 [[TMP0]] +; +entry: + %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1) + %1 = trunc i64 %0 to i32 + %2 = zext i32 %1 to i64 + ret i64 %2 +} + +define i32 @vsetvli_opt_i32() nounwind { +; CHECK-LABEL: @vsetvli_opt_i32( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvli.opt.i32(i32 1, i32 1, i32 1) +; CHECK-NEXT: ret i32 [[TMP0]] +; +entry: + %0 = call i32 @llvm.riscv.vsetvli.opt.i32(i32 1, i32 1, i32 1) + %1 = and i32 %0, 2147483647 + ret i32 %1 +} + +define i64 @vsetvli_opt_sext_i64() nounwind { +; CHECK-LABEL: @vsetvli_opt_sext_i64( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.opt.i64(i64 1, i64 1, i64 1) +; CHECK-NEXT: ret i64 [[TMP0]] +; +entry: + %0 = call i64 @llvm.riscv.vsetvli.opt.i64(i64 1, i64 1, i64 1) + %1 = trunc i64 %0 to i32 + %2 = sext i32 %1 to i64 + ret i64 %2 +} + +define i64 @vsetvli_opt_zext_i64() nounwind { +; CHECK-LABEL: @vsetvli_opt_zext_i64( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.opt.i64(i64 1, i64 1, i64 1) +; CHECK-NEXT: ret i64 [[TMP0]] +; +entry: + %0 = call i64 @llvm.riscv.vsetvli.opt.i64(i64 1, i64 1, i64 1) + %1 = trunc i64 %0 to i32 + %2 = zext i32 %1 to i64 + ret i64 %2 +} + +define i32 @vsetvlimax_opt_i32() nounwind { +; CHECK-LABEL: @vsetvlimax_opt_i32( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.opt.i32(i32 1, i32 1) +; CHECK-NEXT: ret i32 [[TMP0]] +; +entry: + %0 = call i32 @llvm.riscv.vsetvlimax.opt.i32(i32 1, i32 1) + %1 = and i32 %0, 2147483647 + ret i32 %1 +} 
+ +define i64 @vsetvlimax_opt_sext_i64() nounwind { +; CHECK-LABEL: @vsetvlimax_opt_sext_i64( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1) +; CHECK-NEXT: ret i64 [[TMP0]] +; +entry: + %0 = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1) + %1 = trunc i64 %0 to i32 + %2 = sext i32 %1 to i64 + ret i64 %2 +} + +define i64 @vsetvlimax_opt_zext_i64() nounwind { +; CHECK-LABEL: @vsetvlimax_opt_zext_i64( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1) +; CHECK-NEXT: ret i64 [[TMP0]] +; +entry: + %0 = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 1, i64 1) + %1 = trunc i64 %0 to i32 + %2 = zext i32 %1 to i64 + ret i64 %2 +} |