| | | |
|---|---|---|
| author | Amara Emerson <amara@apple.com> | 2022-07-27 22:10:42 -0700 |
| committer | Amara Emerson <amara@apple.com> | 2022-07-27 22:10:42 -0700 |
| commit | 93e3aeb9a84f489d632a6d494813ed4fe2cb6865 | |
| tree | 412d7666a16893abe300a3d6675c43f0ad68222c | |
| parent | c16fa781f47378a63a515d07c169213573ecd72e | |
[AArch64][GlobalISel] Fix custom legalization of rotates using sext for shift vs zext.
Rotates are defined, per the DAG documentation, as taking unsigned shift amounts,
so we need to zero-extend the amount here instead of sign-extending it.
Fixes issue 56664
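
To make the sext/zext difference concrete, here is a small standalone C++ sketch (illustrative only, not LLVM code; the values and variable names are hypothetical) showing what happens when a 32-bit rotate amount with its high bit set is widened to 64 bits: sign extension smears ones into the upper half, while zero extension preserves the unsigned amount the rotate semantics expect.

```cpp
// Standalone illustration (hypothetical values, not taken from the patch):
// widening a 32-bit rotate amount whose high bit happens to be set.
#include <cstdint>
#include <cstdio>

int main() {
  uint32_t Amt = 0x80000001u;               // 32-bit amount with the sign bit set
  int64_t AsSExt = (int64_t)(int32_t)Amt;   // what sign-extending (G_SEXT) yields
  uint64_t AsZExt = (uint64_t)Amt;          // what zero-extending (G_ZEXT) yields

  std::printf("sext: 0x%016llx\n", (unsigned long long)AsSExt); // 0xffffffff80000001
  std::printf("zext: 0x%016llx\n", (unsigned long long)AsZExt); // 0x0000000080000001
  return 0;
}
```

Because the rotate amount is defined to be unsigned, only the zero-extended form preserves the value the widened G_ROTR should see, which is why the legalizer now builds a G_ZEXT of the amount register.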
| | | |
|---|---|---|
| -rw-r--r-- | llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp | 2 |
| -rw-r--r-- | llvm/test/CodeGen/AArch64/GlobalISel/legalize-rotr-rotl.mir | 8 |

2 files changed, 5 insertions(+), 5 deletions(-)
```diff
diff --git a/llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp b/llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
index 380d362..b0f7b14 100644
--- a/llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
+++ b/llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
@@ -875,7 +875,7 @@ bool AArch64LegalizerInfo::legalizeRotate(MachineInstr &MI,
   (void)AmtTy;
   assert(AmtTy.isScalar() && "Expected a scalar rotate");
   assert(AmtTy.getSizeInBits() < 64 && "Expected this rotate to be legal");
-  auto NewAmt = Helper.MIRBuilder.buildSExt(LLT::scalar(64), AmtReg);
+  auto NewAmt = Helper.MIRBuilder.buildZExt(LLT::scalar(64), AmtReg);
   Helper.Observer.changingInstr(MI);
   MI.getOperand(2).setReg(NewAmt.getReg(0));
   Helper.Observer.changedInstr(MI);
diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-rotr-rotl.mir b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-rotr-rotl.mir
index 79aaed5..d2e8f15 100644
--- a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-rotr-rotl.mir
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-rotr-rotl.mir
@@ -12,8 +12,8 @@ body: |
     ; CHECK-NEXT: {{ $}}
     ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
     ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
-    ; CHECK-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[COPY1]](s32)
-    ; CHECK-NEXT: %rot:_(s32) = G_ROTR [[COPY]], [[SEXT]](s64)
+    ; CHECK-NEXT: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY1]](s32)
+    ; CHECK-NEXT: %rot:_(s32) = G_ROTR [[COPY]], [[ZEXT]](s64)
     ; CHECK-NEXT: $w0 = COPY %rot(s32)
     ; CHECK-NEXT: RET_ReallyLR implicit $w0
     %0:_(s32) = COPY $w0
@@ -59,8 +59,8 @@ body: |
     ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
     ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
     ; CHECK-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[COPY1]]
-    ; CHECK-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[SUB]](s32)
-    ; CHECK-NEXT: %rot:_(s32) = G_ROTR [[COPY]], [[SEXT]](s64)
+    ; CHECK-NEXT: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[SUB]](s32)
+    ; CHECK-NEXT: %rot:_(s32) = G_ROTR [[COPY]], [[ZEXT]](s64)
     ; CHECK-NEXT: $w0 = COPY %rot(s32)
     ; CHECK-NEXT: RET_ReallyLR implicit $w0
     %0:_(s32) = COPY $w0
```
