author      Jakub Jelinek <jakub@redhat.com>    2020-07-20 10:24:19 +0200
committer   Jakub Jelinek <jakub@redhat.com>    2020-07-20 10:24:19 +0200
commit      e4f1cbc35b1e823a0a6e58eeca61c8c6ca351875 (patch)
tree        baee51a3e02e958107cd446bb1587507df747e58 /gcc/gimple-fold.c
parent      83b171655dc0785262a444f5d5c2be7195977987 (diff)
gimple-fold: Handle bitfields in fold_const_aggregate_ref_1 [PR93121]
When working on __builtin_bit_cast, which needs to handle bitfields too, I've made the following change to handle at least some bitfields in fold_const_aggregate_ref_1 (those that have an integral representative). It already handles some, but only those that start and end at byte boundaries.

2020-07-20  Jakub Jelinek  <jakub@redhat.com>

        PR libstdc++/93121
        * gimple-fold.c (fold_const_aggregate_ref_1): For COMPONENT_REF
        of a bitfield not aligned on byte boundaries, try to
        fold_ctor_reference DECL_BIT_FIELD_REPRESENTATIVE if any and
        adjust it depending on endianness.

        * gcc.dg/tree-ssa/pr93121-2.c: New test.
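For illustration, here is a minimal sketch (not the actual testcase; that is gcc.dg/tree-ssa/pr93121-2.c from the patch) of the kind of read this change lets the compiler fold to a constant: a bitfield that neither starts nor ends on a byte boundary, read from a constant-initialized object.

    /* Hypothetical sketch: field b spans bits 3..9 of its integral
       DECL_BIT_FIELD_REPRESENTATIVE, so it neither starts nor ends on
       a byte boundary.  Before this change fold_const_aggregate_ref_1
       gave up on such a read; with it, read_b folds to 73.  */
    struct S { unsigned a : 3, b : 7, c : 6; };
    static const struct S s = { 5, 73, 21 };

    unsigned
    read_b (void)
    {
      return s.b;
    }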
Diffstat (limited to 'gcc/gimple-fold.c')
 gcc/gimple-fold.c | 60 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 58 insertions(+), 2 deletions(-)
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index dfda6db..81c77f7 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -7189,8 +7189,64 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
if (maybe_lt (offset, 0))
return NULL_TREE;
- return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
- base);
+ tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
+ if (tem)
+ return tem;
+
+ /* For bit field reads try to read the representative and
+ adjust. */
+ if (TREE_CODE (t) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
+ && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
+ {
+ HOST_WIDE_INT csize, coffset;
+ tree field = TREE_OPERAND (t, 1);
+ tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
+ && size.is_constant (&csize)
+ && offset.is_constant (&coffset)
+ && (coffset % BITS_PER_UNIT != 0
+ || csize % BITS_PER_UNIT != 0)
+ && !reverse
+ && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
+ {
+ poly_int64 bitoffset;
+ poly_uint64 field_offset, repr_offset;
+ if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
+ && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
+ bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
+ else
+ bitoffset = 0;
+ bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
+ HOST_WIDE_INT bitoff;
+ int diff = (TYPE_PRECISION (TREE_TYPE (repr))
+ - TYPE_PRECISION (TREE_TYPE (field)));
+ if (bitoffset.is_constant (&bitoff)
+ && bitoff >= 0
+ && bitoff <= diff)
+ {
+ offset -= bitoff;
+ size = tree_to_uhwi (DECL_SIZE (repr));
+
+ tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
+ size, base);
+ if (tem && TREE_CODE (tem) == INTEGER_CST)
+ {
+ if (!BYTES_BIG_ENDIAN)
+ tem = wide_int_to_tree (TREE_TYPE (field),
+ wi::lrshift (wi::to_wide (tem),
+ bitoff));
+ else
+ tem = wide_int_to_tree (TREE_TYPE (field),
+ wi::lrshift (wi::to_wide (tem),
+ diff - bitoff));
+ return tem;
+ }
+ }
+ }
+ }
+ break;
case REALPART_EXPR:
case IMAGPART_EXPR:
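To make the endianness adjustment above concrete, here is a hedged sketch of the arithmetic on a little-endian target, using the hypothetical struct S from the sketch earlier (the names repr and bitoff and the packing order are illustrative assumptions, not taken from the patch). Once fold_ctor_reference has produced the representative's constant value, the field is recovered by a logical right shift by bitoff on little-endian targets, or by (precision of repr - precision of field) - bitoff on big-endian ones, followed by truncation to the field's type.

    #include <stdio.h>

    /* Illustrative little-endian extraction mirroring the wi::lrshift
       call in the patch, for struct S { unsigned a : 3, b : 7, c : 6; }
       initialized with { 5, 73, 21 }.  The mask models the truncation
       that wide_int_to_tree performs via the field's type.  */
    int
    main (void)
    {
      unsigned repr = (21u << 10) | (73u << 3) | 5u; /* c:6 | b:7 | a:3 */
      unsigned bitoff = 3;                   /* b's bit offset in repr */
      unsigned b = (repr >> bitoff) & 0x7fu; /* shift, then 7-bit mask */
      printf ("%u\n", b);                    /* prints 73 */
      return 0;
    }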