path: root/gcc/simplify-rtx.c
author    Richard Sandiford <rsandifo@redhat.com>    2004-09-04 08:50:36 +0000
committer Richard Sandiford <rsandifo@gcc.gnu.org>  2004-09-04 08:50:36 +0000
commit    273a2526bdccb1cde0f332d254616982cc142a64 (patch)
tree      4320b41f468e28f9439feab3f57dff953085081c /gcc/simplify-rtx.c
parent    caf29de78e1a609a86f4f883db628e97f51b96c8 (diff)
md.texi (shift patterns): New anchor.
* doc/md.texi (shift patterns): New anchor.  Add reference to
TARGET_SHIFT_TRUNCATION_MASK.
* doc/tm.texi (TARGET_SHIFT_TRUNCATION_MASK): Document.
* target.h (shift_truncation_mask): New target hook.
* targhooks.h (default_shift_truncation_mask): Declare.
* targhooks.c (default_shift_truncation_mask): Define.
* target-def.h (TARGET_SHIFT_TRUNCATION_MASK): Define.
(TARGET_INITIALIZER): Include it.
* simplify-rtx.c (simplify_binary_operation): Combine ASHIFT,
ASHIFTRT and LSHIFTRT cases.  Truncate arg1 if SHIFT_COUNT_TRUNCATED,
otherwise reject all out-of-range values.  Fix sign-extension code for
modes whose width is smaller than HOST_BITS_PER_WIDE_INT.
* optabs.c (simplify_expand_binop, force_expand_binop): New functions.
(expand_superword_shift, expand_subword_shift): Likewise.
(expand_doubleword_shift_condmove, expand_doubleword_shift): Likewise.
(expand_binop): Use them to implement double-word shifts.
* config/arm/arm.c (arm_shift_truncation_mask): New function.
(TARGET_SHIFT_TRUNCATION_MASK): Define.

From-SVN: r87079
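
A minimal standalone sketch of the rule the simplify-rtx.c hunk below
implements, assuming a target where SHIFT_COUNT_TRUNCATED is nonzero; the
function name fold_shift, the long/unsigned long types and the 16-bit
example are illustrative stand-ins for the HOST_WIDE_INT machinery, not
GCC code.  The count is truncated when the target truncates it, out-of-range
counts are left unfolded otherwise, and arithmetic right shifts are
sign-extended using the mode width rather than the host word size.

#include <stdio.h>

#define SHIFT_COUNT_TRUNCATED 1   /* assumed target property */

enum code { ASHIFT, ASHIFTRT, LSHIFTRT };

/* Fold a constant shift in a mode that is "width" bits wide.  arg0 holds
   the first operand zero-extended to the host word, arg0s the same value
   sign-extended.  Returns 0 when the shift should not be folded.  */
static int
fold_shift (enum code code, unsigned long arg0, long arg0s,
            long arg1, int width, long *val)
{
  if (SHIFT_COUNT_TRUNCATED)
    arg1 = (long) ((unsigned long) arg1 % width);
  else if (arg1 < 0 || arg1 >= width)
    return 0;                       /* leave out-of-range counts alone */

  *val = (long) (code == ASHIFT ? arg0 << arg1 : arg0 >> arg1);

  /* Sign-extend arithmetic right shifts using the mode width, not the
     host word size.  */
  if (code == ASHIFTRT && arg0s < 0 && arg1 > 0)
    *val |= (long) ((unsigned long) -1 << (width - arg1));
  return 1;
}

int
main (void)
{
  long v;
  /* -8 in a 16-bit mode: 0xfff8 zero-extended, -8 sign-extended.  */
  if (fold_shift (ASHIFTRT, 0xfff8, -8, 1, 16, &v))
    printf ("%ld\n", v);            /* prints -4 */
  return 0;
}

The example extends from width - arg1, matching the new code; the superseded
code extended from HOST_BITS_PER_WIDE_INT - arg1, which is only right when
the mode fills the whole host word.
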
Diffstat (limited to 'gcc/simplify-rtx.c')
-rw-r--r--  gcc/simplify-rtx.c  |  45
1 file changed, 15 insertions(+), 30 deletions(-)
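
The optabs.c changes listed in the log (expand_subword_shift,
expand_superword_shift, expand_doubleword_shift and friends) are not part of
this diff, since the diffstat is limited to gcc/simplify-rtx.c.  For
orientation, the sketch below shows the standard decomposition such helpers
perform for a double-word left shift; it illustrates the general technique
only, not GCC's actual implementation, and doubleword_ashl, the fixed 32-bit
word size and the example values are made up for the illustration.

#include <stdint.h>
#include <stdio.h>

#define BITS_PER_WORD 32

/* Shift the double-word value {hi,lo} left by "count" bits (count assumed
   to be less than 2 * BITS_PER_WORD).  */
static void
doubleword_ashl (uint32_t hi, uint32_t lo, unsigned int count,
                 uint32_t *out_hi, uint32_t *out_lo)
{
  if (count >= BITS_PER_WORD)
    {
      /* "Superword" case: the low word moves entirely into the high word.  */
      *out_hi = lo << (count - BITS_PER_WORD);
      *out_lo = 0;
    }
  else if (count == 0)
    {
      *out_hi = hi;
      *out_lo = lo;
    }
  else
    {
      /* "Subword" case: bits shifted out of the low word fill the gap
         opened in the high word.  */
      *out_hi = (hi << count) | (lo >> (BITS_PER_WORD - count));
      *out_lo = lo << count;
    }
}

int
main (void)
{
  uint32_t hi, lo;
  doubleword_ashl (0x00000001, 0x80000000, 4, &hi, &lo);
  printf ("%08x:%08x\n", hi, lo);   /* prints 00000018:00000000 */
  return 0;
}
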
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index a0f1769..285f898 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -2343,41 +2343,26 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode,
break;
case LSHIFTRT:
- /* If shift count is undefined, don't fold it; let the machine do
- what it wants. But truncate it if the machine will do that. */
- if (arg1 < 0)
- return 0;
-
- if (SHIFT_COUNT_TRUNCATED)
- arg1 %= width;
-
- val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
- break;
-
case ASHIFT:
- if (arg1 < 0)
- return 0;
-
- if (SHIFT_COUNT_TRUNCATED)
- arg1 %= width;
-
- val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
- break;
-
case ASHIFTRT:
- if (arg1 < 0)
- return 0;
-
+ /* Truncate the shift if SHIFT_COUNT_TRUNCATED, otherwise make sure the
+ value is in range. We can't return any old value for out-of-range
+ arguments because either the middle-end (via shift_truncation_mask)
+ or the back-end might be relying on target-specific knowledge.
+ Nor can we rely on shift_truncation_mask, since the shift might
+ not be part of an ashlM3, lshrM3 or ashrM3 instruction. */
if (SHIFT_COUNT_TRUNCATED)
- arg1 %= width;
-
- val = arg0s >> arg1;
+ arg1 = (unsigned HOST_WIDE_INT) arg1 % width;
+ else if (arg1 < 0 || arg1 >= GET_MODE_BITSIZE (mode))
+ return 0;
- /* Bootstrap compiler may not have sign extended the right shift.
- Manually extend the sign to insure bootstrap cc matches gcc. */
- if (arg0s < 0 && arg1 > 0)
- val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
+ val = (code == ASHIFT
+ ? ((unsigned HOST_WIDE_INT) arg0) << arg1
+ : ((unsigned HOST_WIDE_INT) arg0) >> arg1);
+ /* Sign-extend the result for arithmetic right shifts. */
+ if (code == ASHIFTRT && arg0s < 0 && arg1 > 0)
+ val |= ((HOST_WIDE_INT) -1) << (width - arg1);
break;
case ROTATERT: