author    Alexandre Oliva <oliva@adacore.com>  2025-01-13 14:53:25 -0300
committer Alexandre Oliva <oliva@gnu.org>      2025-01-14 13:14:56 -0300
commit    22fe3c05d86b52c35850918bfb21e1f597e1b5c7 (patch)
tree      01ff7b06230e5ec8b99f30552b019162467063c6 /gcc
parent    e5e9e50fc6816713d012f1d96ae308a0946d5a14 (diff)
[ifcombine] check and extend constants to compare with bitfields
Add logic to check and extend constants compared with bitfields, so
that fields are only compared with constants they could actually
equal.  This involves making sure the signedness doesn't change
between loads and conversions before shifts: we'd need to carry a lot
more data to deal with all the possibilities.

for  gcc/ChangeLog

        PR tree-optimization/118456
        * gimple-fold.cc (decode_field_reference): Punt if shifting
        after changing signedness.
        (fold_truth_andor_for_ifcombine): Check extension bits in
        constants before clipping.

for  gcc/testsuite/ChangeLog

        PR tree-optimization/118456
        * gcc.dg/field-merge-21.c: New.
        * gcc.dg/field-merge-22.c: New.
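For orientation, the transformation that makes the constant handling delicate
can be sketched by hand.  The snippet below is an illustration only, not GCC
output: the struct, the assumption of a little-endian target with 16-bit
short, and the memcpy-based wide load are invented to show how two narrow
compares become a single wider load compared against a combined, clipped
constant, which is the step the new extension check guards.

/* Illustrative sketch only; assumes a little-endian target and 16-bit
   short.  An ifcombine-style merge turns the two byte compares in
   compare_separately into the single 16-bit compare in compare_merged,
   building the right-hand constant by shifting and clipping the original
   constants.  */
struct two_bytes { unsigned char a, b; } t;

int
compare_separately (void)
{
  return t.a == 17 && t.b == 229;      /* two 8-bit loads, two compares */
}

int
compare_merged (void)
{
  unsigned short both;
  __builtin_memcpy (&both, &t, sizeof both);      /* one 16-bit load */
  return both == (unsigned short) (17 | (229 << 8));
}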
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/gimple-fold.cc                     | 40
-rw-r--r--  gcc/testsuite/gcc.dg/field-merge-21.c  | 53
-rw-r--r--  gcc/testsuite/gcc.dg/field-merge-22.c  | 31
3 files changed, 122 insertions, 2 deletions
diff --git a/gcc/gimple-fold.cc b/gcc/gimple-fold.cc
index 93ed8b3..5b1fbe6 100644
--- a/gcc/gimple-fold.cc
+++ b/gcc/gimple-fold.cc
@@ -7712,6 +7712,18 @@ decode_field_reference (tree *pexp, HOST_WIDE_INT *pbitsize,
if (shiftrt)
{
+ /* Punt if we're shifting by more than the loaded bitfield (after
+ adjustment), or if there's a shift after a change of signedness.
+ When comparing this field with a constant, we'll check that the
+ constant is a proper sign- or zero-extension (depending on signedness)
+ of a value that would fit in the selected portion of the bitfield. A
+ shift after a change of signedness would make the extension
+ non-uniform, and we can't deal with that (yet ???). See
+ gcc.dg/field-merge-22.c for a test that would go wrong. */
+ if (*pbitsize <= shiftrt
+ || (convert_before_shift
+ && outer_type && unsignedp != TYPE_UNSIGNED (outer_type)))
+ return NULL_TREE;
if (!*preversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
*pbitpos += shiftrt;
*pbitsize -= shiftrt;
@@ -8512,13 +8524,25 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
and bit position. */
if (l_const.get_precision ())
{
+ /* Before clipping upper bits of the right-hand operand of the compare,
+ check that they're sign or zero extensions, depending on how the
+ left-hand operand would be extended. */
+ bool l_non_ext_bits = false;
+ if (ll_bitsize < lr_bitsize)
+ {
+ wide_int zext = wi::zext (l_const, ll_bitsize);
+ if ((ll_unsignedp ? zext : wi::sext (l_const, ll_bitsize)) == l_const)
+ l_const = zext;
+ else
+ l_non_ext_bits = true;
+ }
/* We're doing bitwise equality tests, so don't bother with sign
extensions. */
l_const = wide_int::from (l_const, lnprec, UNSIGNED);
if (ll_and_mask.get_precision ())
l_const &= wide_int::from (ll_and_mask, lnprec, UNSIGNED);
l_const <<= xll_bitpos;
- if ((l_const & ~ll_mask) != 0)
+ if (l_non_ext_bits || (l_const & ~ll_mask) != 0)
{
warning_at (lloc, OPT_Wtautological_compare,
"comparison is always %d", wanted_code == NE_EXPR);
@@ -8530,11 +8554,23 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
again. */
gcc_checking_assert (r_const.get_precision ());
+ /* Before clipping upper bits of the right-hand operand of the compare,
+ check that they're sign or zero extensions, depending on how the
+ left-hand operand would be extended. */
+ bool r_non_ext_bits = false;
+ if (rl_bitsize < rr_bitsize)
+ {
+ wide_int zext = wi::zext (r_const, rl_bitsize);
+ if ((rl_unsignedp ? zext : wi::sext (r_const, rl_bitsize)) == r_const)
+ r_const = zext;
+ else
+ r_non_ext_bits = true;
+ }
r_const = wide_int::from (r_const, lnprec, UNSIGNED);
if (rl_and_mask.get_precision ())
r_const &= wide_int::from (rl_and_mask, lnprec, UNSIGNED);
r_const <<= xrl_bitpos;
- if ((r_const & ~rl_mask) != 0)
+ if (r_non_ext_bits || (r_const & ~rl_mask) != 0)
{
warning_at (rloc, OPT_Wtautological_compare,
"comparison is always %d", wanted_code == NE_EXPR);
diff --git a/gcc/testsuite/gcc.dg/field-merge-21.c b/gcc/testsuite/gcc.dg/field-merge-21.c
new file mode 100644
index 0000000..042b212
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/field-merge-21.c
@@ -0,0 +1,53 @@
+/* { dg-do run } */
+/* { dg-options "-O2" } */
+
+/* PR tree-optimization/118456 */
+/* Check that shifted fields compared with constants compare correctly even
+ if the constant contains sign-extension bits not present in the bit
+ range. */
+
+struct S { unsigned long long o; unsigned short a, b; } s;
+
+__attribute__((noipa)) int
+foo (void)
+{
+ return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == -27;
+}
+
+__attribute__((noipa)) int
+bar (void)
+{
+ return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == -91;
+}
+
+__attribute__((noipa)) int
+bars (void)
+{
+ return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == 37;
+}
+
+__attribute__((noipa)) int
+baz (void)
+{
+ return ((unsigned char) s.a) >> 3 == 49 && ((signed char) s.b) >> 2 == -27;
+}
+
+__attribute__((noipa)) int
+bazs (void)
+{
+ return ((unsigned char) s.a) >> 3 == (unsigned char) -15 && ((signed char) s.b) >> 2 == -27;
+}
+
+int
+main ()
+{
+ s.a = 17 << 3;
+ s.b = (unsigned short)(-27u << 2);
+ if (foo () != 1
+ || bar () != 0
+ || bars () != 0
+ || baz () != 0
+ || bazs () != 0)
+ __builtin_abort ();
+ return 0;
+}
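The constants in the functions above are chosen so that the wrong ones
collide with the right ones in their low bits; without the extension check,
clipping would make them compare equal.  Spelled out (own arithmetic,
assuming an 8-bit char):

/* The first compare keeps the top 5 bits of the byte loaded from s.a, the
   second the top 6 bits of the byte loaded from s.b.
     17                  = 0b00010001
     49                  = 0b00110001   same low 5 bits as 17
     (unsigned char) -15 = 0b11110001   same low 5 bits as 17
     -27 (as 8 bits)     = 0b11100101
     -91 (as 8 bits)     = 0b10100101   same low 6 bits as -27
     37                  = 0b00100101   same low 6 bits as -27
   Only foo() uses constants whose upper bits are the correct zero- or
   sign-extension of those low bits, so only foo() may return 1.  */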
diff --git a/gcc/testsuite/gcc.dg/field-merge-22.c b/gcc/testsuite/gcc.dg/field-merge-22.c
new file mode 100644
index 0000000..45b29c0
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/field-merge-22.c
@@ -0,0 +1,31 @@
+/* { dg-do run } */
+/* { dg-options "-O2" } */
+
+/* PR tree-optimization/118456 */
+/* Check that compares with constants take into account sign/zero extension of
+ both the bitfield and of the shifting type. */
+
+#define shift (__CHAR_BIT__ - 4)
+
+struct S {
+ signed char a : shift + 2;
+ signed char b : shift + 2;
+ short ignore[0];
+} s;
+
+__attribute__((noipa)) int
+foo (void)
+{
+ return ((unsigned char) s.a) >> shift == 15
+ && ((unsigned char) s.b) >> shift == 0;
+}
+
+int
+main ()
+{
+ s.a = -1;
+ s.b = 1;
+ if (foo () != 1)
+ __builtin_abort ();
+ return 0;
+}
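One way to see the non-uniform extension that decode_field_reference now
punts on: in the test above, the bits the shift exposes come from
sign-extending the signed bitfield into the unsigned char, not from the field
itself.  Written out by hand (own illustration, assuming an 8-bit char so
that shift is 4):

#include <stdio.h>

int
main (void)
{
  /* The 6-bit signed bitfield s.a above holds -1; its raw bits are
     0b111111.  */
  unsigned raw = 0x3f;

  /* What the C source computes: the field first sign-extends to -1, the
     cast to unsigned char then gives 0xff, and 0xff >> 4 == 15.  */
  unsigned as_written = (unsigned char) -1 >> 4;

  /* What a merged load would compute if it zero-extended the 6 loaded
     bits and only then shifted: 0x3f >> 4 == 3.  Because the answer
     depends on where the conversion happens relative to the shift, the
     fold is not attempted in this situation.  */
  unsigned if_zero_extended_first = raw >> 4;

  printf ("%u vs %u\n", as_written, if_zero_extended_first);   /* 15 vs 3 */
  return 0;
}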