aboutsummaryrefslogtreecommitdiff
path: root/gcc/gimple-fold.c
diff options
context:
space:
mode:
authorMartin Sebor <msebor@redhat.com>2018-07-09 20:33:48 +0000
committerMartin Sebor <msebor@gcc.gnu.org>2018-07-09 14:33:48 -0600
commit35b4d3a644222b7bd69b3a1e9c00e78f3dbf3eba (patch)
treea540d226c0eeee38bd5ea06483ae688c1e0c070e /gcc/gimple-fold.c
parentaad2444d346d4ae504a938de8708341dd1889aed (diff)
downloadgcc-35b4d3a644222b7bd69b3a1e9c00e78f3dbf3eba.zip
gcc-35b4d3a644222b7bd69b3a1e9c00e78f3dbf3eba.tar.gz
gcc-35b4d3a644222b7bd69b3a1e9c00e78f3dbf3eba.tar.bz2
PR middle-end/77357 - strlen of constant strings not folded
gcc/ChangeLog: PR middle-end/77357 PR middle-end/86428 * builtins.c (c_strlen): Avoid out-of-bounds warnings when accessing implicitly initialized array elements. * expr.c (string_constant): Handle string initializers of character arrays within aggregates. * gimple-fold.c (fold_array_ctor_reference): Add argument. Store element offset. As a special case, handle zero size. (fold_nonarray_ctor_reference): Same. (fold_ctor_reference): Add argument. Store subobject offset. * gimple-fold.h (fold_ctor_reference): Add argument. gcc/testsuite/ChangeLog: PR middle-end/77357 * gcc.dg/strlenopt-49.c: New test. * gcc.dg/strlenopt-50.c: New test. * gcc.dg/strlenopt-51.c: New test. * gcc.dg/strlenopt-52.c: New test. From-SVN: r262522
Diffstat (limited to 'gcc/gimple-fold.c')
-rw-r--r--gcc/gimple-fold.c145
1 files changed, 106 insertions, 39 deletions
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index 6ce34bf..a6b4283 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -6476,14 +6476,19 @@ get_base_constructor (tree base, poly_int64_pod *bit_offset,
}
}
-/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
- SIZE to the memory at bit OFFSET. */
+/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
+ to the memory at bit OFFSET. When non-null, TYPE is the expected
+ type of the reference; otherwise the type of the referenced element
+ is used instead. When SIZE is zero, attempt to fold a reference to
+ the entire element which OFFSET refers to. Increment *SUBOFF by
+ the bit offset of the accessed element. */
static tree
fold_array_ctor_reference (tree type, tree ctor,
unsigned HOST_WIDE_INT offset,
unsigned HOST_WIDE_INT size,
- tree from_decl)
+ tree from_decl,
+ unsigned HOST_WIDE_INT *suboff)
{
offset_int low_bound;
offset_int elt_size;
@@ -6508,12 +6513,13 @@ fold_array_ctor_reference (tree type, tree ctor,
return NULL_TREE;
elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
- /* We can handle only constantly sized accesses that are known to not
- be larger than size of array element. */
- if (!TYPE_SIZE_UNIT (type)
- || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
- || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
- || elt_size == 0)
+ /* When TYPE is non-null, verify that it specifies a constant-size
+ access not larger than the size of the array element. */
+ if (type
+ && (!TYPE_SIZE_UNIT (type)
+ || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
+ || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
+ || elt_size == 0))
return NULL_TREE;
/* Compute the array index we look for. */
@@ -6529,21 +6535,42 @@ fold_array_ctor_reference (tree type, tree ctor,
if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
return NULL_TREE;
if (tree val = get_array_ctor_element_at_index (ctor, access_index))
- return fold_ctor_reference (type, val, inner_offset, size, from_decl);
+ {
+ if (!size && TREE_CODE (val) != CONSTRUCTOR)
+ {
+ /* For the final reference to the entire accessed element
+ (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
+ may be null) in favor of the type of the element, and set
+ SIZE to the size of the accessed element. */
+ inner_offset = 0;
+ type = TREE_TYPE (val);
+ size = elt_size.to_uhwi () * BITS_PER_UNIT;
+ }
+
+ *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
+ return fold_ctor_reference (type, val, inner_offset, size, from_decl,
+ suboff);
+ }
- /* When memory is not explicitely mentioned in constructor,
- it is 0 (or out of range). */
- return build_zero_cst (type);
+ /* Memory not explicitly mentioned in constructor is 0 (or
+ the reference is out of range). */
+ return type ? build_zero_cst (type) : NULL_TREE;
}
-/* CTOR is CONSTRUCTOR of an aggregate or vector.
- Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
+/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
+ of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
+ is the expected type of the reference; otherwise the type of
+ the referenced member is used instead. When SIZE is zero,
+ attempt to fold a reference to the entire member which OFFSET
+ refers to. Increment *SUBOFF by the bit offset
+ of the accessed member. */
static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
unsigned HOST_WIDE_INT offset,
unsigned HOST_WIDE_INT size,
- tree from_decl)
+ tree from_decl,
+ unsigned HOST_WIDE_INT *suboff)
{
unsigned HOST_WIDE_INT cnt;
tree cfield, cval;
@@ -6554,8 +6581,13 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
tree byte_offset = DECL_FIELD_OFFSET (cfield);
tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
tree field_size = DECL_SIZE (cfield);
- offset_int bitoffset;
- offset_int bitoffset_end, access_end;
+
+ if (!field_size)
+ {
+ /* Determine the size of the flexible array member from
+ the size of the initializer provided for it. */
+ field_size = TYPE_SIZE (TREE_TYPE (cval));
+ }
/* Variable sized objects in static constructors makes no sense,
but field_size can be NULL for flexible array members. */
@@ -6566,50 +6598,82 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
: TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
/* Compute bit offset of the field. */
- bitoffset = (wi::to_offset (field_offset)
- + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
+ offset_int bitoffset
+ = (wi::to_offset (field_offset)
+ + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
/* Compute bit offset where the field ends. */
+ offset_int bitoffset_end;
if (field_size != NULL_TREE)
bitoffset_end = bitoffset + wi::to_offset (field_size);
else
bitoffset_end = 0;
- access_end = offset_int (offset) + size;
+ /* Compute the bit offset of the end of the desired access.
+ As a special case, if the size of the desired access is
+ zero, assume the access is to the entire field (and let
+ the caller make any necessary adjustments based on the bit
+ offset of the field stored in *SUBOFF). */
+ offset_int access_end = offset_int (offset);
+ if (size)
+ access_end += size;
+ else
+ access_end = bitoffset_end;
- /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
- [BITOFFSET, BITOFFSET_END)? */
+ /* Is there any overlap between the desired access at
+ [OFFSET, OFFSET+SIZE) and the offset of the field within
+ the object at [BITOFFSET, BITOFFSET_END)? */
if (wi::cmps (access_end, bitoffset) > 0
&& (field_size == NULL_TREE
|| wi::lts_p (offset, bitoffset_end)))
{
- offset_int inner_offset = offset_int (offset) - bitoffset;
- /* We do have overlap. Now see if field is large enough to
- cover the access. Give up for accesses spanning multiple
- fields. */
+ *suboff += bitoffset.to_uhwi ();
+
+ if (!size && TREE_CODE (cval) != CONSTRUCTOR)
+ {
+ /* For the final reference to the entire accessed member
+ (SIZE is zero), reset OFFSET, disregard TYPE (which may
+ be null) in favor of the type of the member, and set
+ SIZE to the size of the accessed member. */
+ offset = bitoffset.to_uhwi ();
+ type = TREE_TYPE (cval);
+ size = (bitoffset_end - bitoffset).to_uhwi ();
+ }
+
+ /* We do have overlap. Now see if the field is large enough
+ to cover the access. Give up for accesses that extend
+ beyond the end of the object or that span multiple fields. */
if (wi::cmps (access_end, bitoffset_end) > 0)
return NULL_TREE;
if (offset < bitoffset)
return NULL_TREE;
+
+ offset_int inner_offset = offset_int (offset) - bitoffset;
return fold_ctor_reference (type, cval,
inner_offset.to_uhwi (), size,
- from_decl);
+ from_decl, suboff);
}
}
- /* When memory is not explicitely mentioned in constructor, it is 0. */
- return build_zero_cst (type);
+ /* Memory not explicitly mentioned in constructor is 0. */
+ return type ? build_zero_cst (type) : NULL_TREE;
}
-/* CTOR is value initializing memory, fold reference of type TYPE and
- size POLY_SIZE to the memory at bit POLY_OFFSET. */
+/* CTOR is value initializing memory. Fold a reference of TYPE and
+ bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
+ is zero, attempt to fold a reference to the entire subobject
+ which OFFSET refers to. This is used when folding accesses to
+ string members of aggregates. When non-null, set *SUBOFF to
+ the bit offset of the accessed subobject. */
tree
-fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
- poly_uint64 poly_size, tree from_decl)
+fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
+ const poly_uint64 &poly_size, tree from_decl,
+ unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
tree ret;
/* We found the field with exact match. */
- if (useless_type_conversion_p (type, TREE_TYPE (ctor))
+ if (type
+ && useless_type_conversion_p (type, TREE_TYPE (ctor))
&& known_eq (poly_offset, 0U))
return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
@@ -6650,14 +6714,17 @@ fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
}
if (TREE_CODE (ctor) == CONSTRUCTOR)
{
+ unsigned HOST_WIDE_INT dummy = 0;
+ if (!suboff)
+ suboff = &dummy;
if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
return fold_array_ctor_reference (type, ctor, offset, size,
- from_decl);
- else
- return fold_nonarray_ctor_reference (type, ctor, offset, size,
- from_decl);
+ from_decl, suboff);
+
+ return fold_nonarray_ctor_reference (type, ctor, offset, size,
+ from_decl, suboff);
}
return NULL_TREE;