author     Stefan Schulze Frielinghaus <stefansf@linux.ibm.com>  2023-10-01 16:11:32 +0200
committer  Stefan Schulze Frielinghaus <stefansf@linux.ibm.com>  2023-10-01 16:11:32 +0200
commit     86b2ffc0b8334c86ed13974f7d986821040474a4 (patch)
tree       c3c19eac440a0d9b5056d820f00b2d60665dc8a1
parent     e4a4b8e983bac865eb435b11798e38d633b98942 (diff)
rtl-optimization/110939 Really fix narrow comparison of memory and constant
In the former fix in commit 41ef5a34161356817807be3a2e51fbdbe575ae85 I
completely missed the fact that the normal form of a CONST_INT for a mode
with fewer bits than in HOST_WIDE_INT is a sign extended version of the
actual constant.  This even holds true for unsigned constants.

Fixed by masking out the upper bits for the incoming constant and sign
extending the resulting unsigned constant.

gcc/ChangeLog:

	* combine.cc (simplify_compare_const): Properly handle unsigned
	constants while narrowing comparison of memory and constants.
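For illustration, the following is a minimal standalone C sketch of the normal
form in question (this is not GCC code; the HOST_WIDE_INT typedef and the
trunc_int_for_mode stand-in below are hand-rolled approximations of the real
internals): the unsigned 32-bit constant 0xffffffff is canonically stored as
the sign-extended value -1, and masking it with the mode mask recovers the
unsigned value, which is exactly what the fixed condition does.

#include <stdio.h>
#include <stdint.h>

typedef int64_t HOST_WIDE_INT;

/* Stand-in for GCC's trunc_int_for_mode: keep the low PREC bits of C and
   sign extend the result.  This sign-extended form is the normal form of
   a CONST_INT for a mode of PREC bits -- even for unsigned constants.  */
static HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, unsigned int prec)
{
  uint64_t mask = prec < 64 ? ((uint64_t) 1 << prec) - 1 : ~(uint64_t) 0;
  uint64_t sign = (uint64_t) 1 << (prec - 1);
  uint64_t u = (uint64_t) c & mask;
  return (HOST_WIDE_INT) ((u ^ sign) - sign);  /* sign extend */
}

int
main (void)
{
  /* The unsigned 32-bit constant 0xffffffff is stored as -1 ...  */
  HOST_WIDE_INT const_op = trunc_int_for_mode (0xffffffff, 32);
  printf ("normal form: %lld\n", (long long) const_op);
  /* ... and masking with the mode mask recovers the unsigned value.  */
  printf ("masked: %#llx\n",
          (unsigned long long) ((uint64_t) const_op & 0xffffffff));
  return 0;
}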
-rw-r--r--  gcc/combine.cc | 19
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/gcc/combine.cc b/gcc/combine.cc
index e46d202..468b7fd 100644
--- a/gcc/combine.cc
+++ b/gcc/combine.cc
@@ -12003,14 +12003,15 @@ simplify_compare_const (enum rtx_code code, machine_mode mode,
       && !MEM_VOLATILE_P (op0)
       /* The optimization makes only sense for constants which are big enough
          so that we have a chance to chop off something at all. */
-      && (unsigned HOST_WIDE_INT) const_op > 0xff
-      /* Bail out, if the constant does not fit into INT_MODE. */
-      && (unsigned HOST_WIDE_INT) const_op
-         < ((HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1) << 1) - 1)
+      && ((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode)) > 0xff
       /* Ensure that we do not overflow during normalization. */
-      && (code != GTU || (unsigned HOST_WIDE_INT) const_op < HOST_WIDE_INT_M1U))
+      && (code != GTU
+          || ((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
+             < HOST_WIDE_INT_M1U)
+      && trunc_int_for_mode (const_op, int_mode) == const_op)
     {
-      unsigned HOST_WIDE_INT n = (unsigned HOST_WIDE_INT) const_op;
+      unsigned HOST_WIDE_INT n
+        = (unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode);
       enum rtx_code adjusted_code;

       /* Normalize code to either LEU or GEU. */
@@ -12051,15 +12052,15 @@ simplify_compare_const (enum rtx_code code, machine_mode mode,
                    HOST_WIDE_INT_PRINT_HEX ") to (MEM %s "
                    HOST_WIDE_INT_PRINT_HEX ").\n", GET_MODE_NAME (int_mode),
                    GET_MODE_NAME (narrow_mode_iter), GET_RTX_NAME (code),
-                   (unsigned HOST_WIDE_INT)const_op, GET_RTX_NAME (adjusted_code),
-                   n);
+                   (unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode),
+                   GET_RTX_NAME (adjusted_code), n);
        }
       poly_int64 offset = (BYTES_BIG_ENDIAN
                            ? 0
                            : (GET_MODE_SIZE (int_mode)
                               - GET_MODE_SIZE (narrow_mode_iter)));
       *pop0 = adjust_address_nv (op0, narrow_mode_iter, offset);
-      *pop1 = GEN_INT (n);
+      *pop1 = gen_int_mode (n, narrow_mode_iter);
       return adjusted_code;
     }
 }
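Why the narrowing itself is sound can be checked with another standalone
sketch (again not GCC code; it assumes a 32-bit wide mode, an 8-bit narrow
mode, and a sampled rather than exhaustive sweep): once the comparison has
been normalized to LEU and the constant's low-order bytes are all ones,
comparing the full word is equivalent to comparing only the high-order byte,
which corresponds to the narrow memory access the patch emits.

#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  /* Low-order bytes of the constant are all ones, so only the
     high-order byte can decide an unsigned <= comparison.  */
  const uint32_t n = 0x2affffff;
  for (uint64_t x = 0; x <= UINT32_MAX; x += 0x10001)  /* sampled sweep */
    {
      int wide = (uint32_t) x <= n;                             /* wide LEU */
      int narrow = (uint8_t) (x >> 24) <= (uint8_t) (n >> 24);  /* narrow LEU */
      if (wide != narrow)
        {
          printf ("mismatch at %#llx\n", (unsigned long long) x);
          return 1;
        }
    }
  puts ("narrowed comparison agrees on all sampled values");
  return 0;
}

The switch from GEN_INT (n) to gen_int_mode (n, narrow_mode_iter) at the end
of the hunk serves the same normal form: gen_int_mode truncates and sign
extends the value for the narrow mode, yielding a canonical CONST_INT,
whereas GEN_INT wraps the raw value unchanged.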