author     Michael Hayes <m.hayes@elec.canterbury.ac.nz>  2004-01-26 00:31:50 +0000
committer  Michael Hayes <m.hayes@gcc.gnu.org>            2004-01-26 00:31:50 +0000
commit     a026b9d64dfbcdd8efd4ffde6f435274f529765c (patch)
tree       8befc5c1576f5b27d786d74f3d06ad0efb0c7fac
parent     b6ab76d653319cd1136a4a619d714d45c9ce12f0 (diff)
* config/c4x/c4x.c (legitimize_operands): Truncate invalid shift counts.
From-SVN: r76610
-rw-r--r--  gcc/ChangeLog          4
-rw-r--r--  gcc/config/c4x/c4x.c  13
2 files changed, 16 insertions, 1 deletion
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 9173a2e..6a4dac9 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,9 @@
 2004-01-26  Michael Hayes  <m.hayes@elec.canterbury.ac.nz>
 
+	* config/c4x/c4x.c (legitimize_operands): Truncate invalid shift counts.
+
+2004-01-26  Michael Hayes  <m.hayes@elec.canterbury.ac.nz>
+
 	* config/c4x/c4x.c (c4x_valid_operands): More aggressively reject
 	invalid operand combinations.
diff --git a/gcc/config/c4x/c4x.c b/gcc/config/c4x/c4x.c
index 31c1e83..db350ce 100644
--- a/gcc/config/c4x/c4x.c
+++ b/gcc/config/c4x/c4x.c
@@ -3944,7 +3944,7 @@ legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
   /* We can get better code on a C30 if we force constant shift counts
      into a register.  This way they can get hoisted out of loops,
-     tying up a register, but saving an instruction.  The downside is
+     tying up a register but saving an instruction.  The downside is
      that they may get allocated to an address or index register, and
      thus we will get a pipeline conflict if there is a nearby
      indirect address using an address register.
@@ -3978,6 +3978,17 @@ legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
       && (GET_CODE (operands[2]) != CONST_INT))
     operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
+
+  /* When the shift count is greater than 32 then the result
+     can be implementation dependent.  We truncate the result to
+     fit in 5 bits so that we do not emit invalid code when
+     optimising---such as trying to generate lhu2 with 20021124-1.c.  */
+  if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
+      && (GET_CODE (operands[2]) == CONST_INT))
+      && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
+    operands[2]
+      = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
+
   return 1;
 }
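
Note (not part of the commit): the truncation added in the second hunk is a single bitwise AND of the constant shift count with GET_MODE_BITSIZE (mode) - 1, which for a 32-bit mode keeps only the low 5 bits. The standalone sketch below reproduces that arithmetic in plain C, outside of GCC and without any RTL types; the helper name truncate_shift_count and the mode_bits parameter are illustrative stand-ins, not names from c4x.c.

#include <stdio.h>

/* Illustrative sketch only: reduce a shift count the same way the patch
   does, by masking with (mode bitsize - 1).  For a 32-bit mode this keeps
   the low 5 bits, so a count of 36 becomes 4 and a count of 32 becomes 0.  */
static long
truncate_shift_count (long count, int mode_bits)
{
  if (count > mode_bits - 1)
    count &= mode_bits - 1;
  return count;
}

int
main (void)
{
  printf ("%ld\n", truncate_shift_count (7, 32));   /* 7: already in range, left alone */
  printf ("%ld\n", truncate_shift_count (36, 32));  /* 4: 36 & 31 */
  printf ("%ld\n", truncate_shift_count (32, 32));  /* 0: 32 & 31 */
  return 0;
}

This mirrors the guard in the patch, which only rewrites operands[2] when it is a CONST_INT whose value exceeds GET_MODE_BITSIZE (mode) - 1; in-range counts pass through unchanged.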