author     Richard Sandiford <richard.sandiford@linaro.org>	2017-12-20 12:52:58 +0000
committer  Richard Sandiford <rsandifo@gcc.gnu.org>	2017-12-20 12:52:58 +0000
commit     b9c257340bd20ec0e7debffc62ed3e3901c2908d
tree       dcbb7292f9ffa26eed52e3bf4b9f6f183f9df8cd
parent     5ffca72c5db83f53562a968a30d3955126f044f2
poly_int: ao_ref and vn_reference_op_t
This patch changes the offset, size and max_size fields
of ao_ref from HOST_WIDE_INT to poly_int64 and propagates
the change through the code that references it. This includes
changing the off field of vn_reference_op_struct in the same way.
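The practical consequence, visible throughout the diff below, is that direct
comparisons such as `==`, `!=` and `<=` on offsets and sizes are replaced by
predicates like known_eq, maybe_ne and known_le, because a poly_int64 value
need not have a single compile-time answer to a comparison.  The following is
a minimal two-coefficient sketch of why; the struct poly2 and its helpers are
illustrative stand-ins, not GCC's actual poly_int implementation:

    // Illustration only: a value a + b*x, where x is a runtime quantity
    // (e.g. an SVE vector-length multiplier) known only to be >= 0.
    #include <cstdint>

    struct poly2
    {
      int64_t a; // constant term
      int64_t b; // coefficient of the runtime indeterminate x (x >= 0)
    };

    // Equal to the constant C for every possible x: requires b == 0.
    static bool known_eq (poly2 p, int64_t c)
    {
      return p.a == c && p.b == 0;
    }

    // Possibly different from C for some x: the negation of known_eq.
    static bool maybe_ne (poly2 p, int64_t c)
    {
      return !known_eq (p, c);
    }

    // p <= q for every x >= 0: for linear functions of one nonnegative
    // variable this holds iff it holds coefficient-wise.
    static bool known_le (poly2 p, poly2 q)
    {
      return p.a <= q.a && p.b <= q.b;
    }

For example, a max_size of 8 + 8*x is never known_eq to the -1 "unknown"
marker, which is why tests like `max_size != -1` become calls such as the new
ref->max_size_known_p () instead.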
2017-12-20 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
gcc/
* inchash.h (inchash::hash::add_poly_int): New function.
* tree-ssa-alias.h (ao_ref::offset, ao_ref::size, ao_ref::max_size):
Use poly_int64 rather than HOST_WIDE_INT.
(ao_ref::max_size_known_p): New function.
* tree-ssa-sccvn.h (vn_reference_op_struct::off): Use poly_int64_pod
rather than HOST_WIDE_INT.
* tree-ssa-alias.c (ao_ref_base): Apply get_ref_base_and_extent
to temporaries until its interface is adjusted to match.
(ao_ref_init_from_ptr_and_size): Handle polynomial offsets and sizes.
(aliasing_component_refs_p, decl_refs_may_alias_p)
(indirect_ref_may_alias_decl_p, indirect_refs_may_alias_p): Take
the offsets and max_sizes as poly_int64s instead of HOST_WIDE_INTs.
(refs_may_alias_p_1, stmt_kills_ref_p): Adjust for changes to
ao_ref fields.
* alias.c (ao_ref_from_mem): Likewise.
* tree-ssa-dce.c (mark_aliased_reaching_defs_necessary_1): Likewise.
* tree-ssa-dse.c (valid_ao_ref_for_dse, normalize_ref)
(clear_bytes_written_by, setup_live_bytes_from_ref, compute_trims)
(maybe_trim_complex_store, maybe_trim_constructor_store)
(live_bytes_read, dse_classify_store): Likewise.
* tree-ssa-sccvn.c (vn_reference_compute_hash, vn_reference_eq):
(copy_reference_ops_from_ref, ao_ref_init_from_vn_reference)
(fully_constant_vn_reference_p, valueize_refs_1): Likewise.
(vn_reference_lookup_3): Likewise.
* tree-ssa-uninit.c (warn_uninitialized_vars): Likewise.
Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>
From-SVN: r255872
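A recurring rewrite in the hunks below turns the open-coded containment test
`offset2 <= offset && offset2 + size2 >= offset + maxsize` into a call to
known_subrange_p, which must hold for every value of the runtime
indeterminate.  A self-contained sketch of that predicate under the same
two-coefficient model (again an illustration; GCC's real known_subrange_p in
poly-int.h also validates that the sizes are known and nonnegative):

    #include <cstdint>

    struct poly2
    {
      int64_t a; // constant term
      int64_t b; // coefficient of the runtime indeterminate x (x >= 0)
    };

    static poly2 add (poly2 p, poly2 q)
    {
      return poly2 { p.a + q.a, p.b + q.b };
    }

    // p <= q for every x >= 0 iff it holds coefficient-wise.
    static bool known_le (poly2 p, poly2 q)
    {
      return p.a <= q.a && p.b <= q.b;
    }

    // True if [pos1, pos1 + size1) provably lies within
    // [pos2, pos2 + size2) for every x >= 0 -- the poly_int analogue of
    //   pos2 <= pos1 && pos1 + size1 <= pos2 + size2.
    static bool known_subrange_p (poly2 pos1, poly2 size1,
                                  poly2 pos2, poly2 size2)
    {
      return known_le (pos2, pos1)
             && known_le (add (pos1, size1), add (pos2, size2));
    }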
-rw-r--r--  gcc/ChangeLog          30
-rw-r--r--  gcc/alias.c            21
-rw-r--r--  gcc/inchash.h           8
-rw-r--r--  gcc/tree-ssa-alias.c  105
-rw-r--r--  gcc/tree-ssa-alias.h   16
-rw-r--r--  gcc/tree-ssa-dce.c     10
-rw-r--r--  gcc/tree-ssa-dse.c     84
-rw-r--r--  gcc/tree-ssa-sccvn.c  247
-rw-r--r--  gcc/tree-ssa-sccvn.h    2
-rw-r--r--  gcc/tree-ssa-uninit.c  14
10 files changed, 292 insertions(+), 245 deletions(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index d69e244..ccbe1b6 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -2,6 +2,36 @@
 	Alan Hayward  <alan.hayward@arm.com>
 	David Sherwood  <david.sherwood@arm.com>
 
+	* inchash.h (inchash::hash::add_poly_int): New function.
+	* tree-ssa-alias.h (ao_ref::offset, ao_ref::size, ao_ref::max_size):
+	Use poly_int64 rather than HOST_WIDE_INT.
+	(ao_ref::max_size_known_p): New function.
+	* tree-ssa-sccvn.h (vn_reference_op_struct::off): Use poly_int64_pod
+	rather than HOST_WIDE_INT.
+	* tree-ssa-alias.c (ao_ref_base): Apply get_ref_base_and_extent
+	to temporaries until its interface is adjusted to match.
+	(ao_ref_init_from_ptr_and_size): Handle polynomial offsets and sizes.
+	(aliasing_component_refs_p, decl_refs_may_alias_p)
+	(indirect_ref_may_alias_decl_p, indirect_refs_may_alias_p): Take
+	the offsets and max_sizes as poly_int64s instead of HOST_WIDE_INTs.
+	(refs_may_alias_p_1, stmt_kills_ref_p): Adjust for changes to
+	ao_ref fields.
+	* alias.c (ao_ref_from_mem): Likewise.
+	* tree-ssa-dce.c (mark_aliased_reaching_defs_necessary_1): Likewise.
+	* tree-ssa-dse.c (valid_ao_ref_for_dse, normalize_ref)
+	(clear_bytes_written_by, setup_live_bytes_from_ref, compute_trims)
+	(maybe_trim_complex_store, maybe_trim_constructor_store)
+	(live_bytes_read, dse_classify_store): Likewise.
+	* tree-ssa-sccvn.c (vn_reference_compute_hash, vn_reference_eq):
+	(copy_reference_ops_from_ref, ao_ref_init_from_vn_reference)
+	(fully_constant_vn_reference_p, valueize_refs_1): Likewise.
+	(vn_reference_lookup_3): Likewise.
+	* tree-ssa-uninit.c (warn_uninitialized_vars): Likewise.
+
+2017-12-20  Richard Sandiford  <richard.sandiford@linaro.org>
+	    Alan Hayward  <alan.hayward@arm.com>
+	    David Sherwood  <david.sherwood@arm.com>
+
 	* tree-ssa-alias.c (indirect_ref_may_alias_decl_p)
 	(indirect_refs_may_alias_p): Use ranges_may_overlap_p instead
 	of ranges_overlap_p.
diff --git a/gcc/alias.c b/gcc/alias.c
index a02eadc..b1ff8fd 100644
--- a/gcc/alias.c
+++ b/gcc/alias.c
@@ -331,9 +331,9 @@ ao_ref_from_mem (ao_ref *ref, const_rtx mem)
   /* If MEM_OFFSET/MEM_SIZE get us outside of ref->offset/ref->max_size
      drop ref->ref.  */
   if (MEM_OFFSET (mem) < 0
-      || (ref->max_size != -1
-	  && ((MEM_OFFSET (mem) + MEM_SIZE (mem)) * BITS_PER_UNIT
-	      > ref->max_size)))
+      || (ref->max_size_known_p ()
+	  && maybe_gt ((MEM_OFFSET (mem) + MEM_SIZE (mem)) * BITS_PER_UNIT,
+		       ref->max_size)))
     ref->ref = NULL_TREE;
 
   /* Refine size and offset we got from analyzing MEM_EXPR by using
@@ -344,19 +344,18 @@ ao_ref_from_mem (ao_ref *ref, const_rtx mem)
 
   /* The MEM may extend into adjacent fields, so adjust max_size if
      necessary.  */
-  if (ref->max_size != -1
-      && ref->size > ref->max_size)
-    ref->max_size = ref->size;
+  if (ref->max_size_known_p ())
+    ref->max_size = upper_bound (ref->max_size, ref->size);
 
-  /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
+  /* If MEM_OFFSET and MEM_SIZE might get us outside of the base object of
      the MEM_EXPR punt.  This happens for STRICT_ALIGNMENT targets a lot.  */
   if (MEM_EXPR (mem) != get_spill_slot_decl (false)
-      && (ref->offset < 0
+      && (maybe_lt (ref->offset, 0)
	  || (DECL_P (ref->base)
	      && (DECL_SIZE (ref->base) == NULL_TREE
-		  || TREE_CODE (DECL_SIZE (ref->base)) != INTEGER_CST
-		  || wi::ltu_p (wi::to_offset (DECL_SIZE (ref->base)),
-				ref->offset + ref->size)))))
+		  || !poly_int_tree_p (DECL_SIZE (ref->base))
+		  || maybe_lt (wi::to_poly_offset (DECL_SIZE (ref->base)),
+			       ref->offset + ref->size)))))
     return false;
 
   return true;
diff --git a/gcc/inchash.h b/gcc/inchash.h
index 6144629..ba5a7de 100644
--- a/gcc/inchash.h
+++ b/gcc/inchash.h
@@ -57,6 +57,14 @@ class hash
     val = iterative_hash_hashval_t (v, val);
   }
 
+  /* Add polynomial value V, treating each element as an unsigned int.  */
+  template<unsigned int N, typename T>
+  void add_poly_int (const poly_int_pod<N, T> &v)
+  {
+    for (unsigned int i = 0; i < N; ++i)
+      add_int (v.coeffs[i]);
+  }
+
   /* Add HOST_WIDE_INT value V.  */
   void add_hwi (HOST_WIDE_INT v)
   {
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 8717a1f..ce63cc1 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -635,11 +635,15 @@ tree
 ao_ref_base (ao_ref *ref)
 {
   bool reverse;
+  HOST_WIDE_INT offset, size, max_size;
 
   if (ref->base)
     return ref->base;
-  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
-				       &ref->max_size, &reverse);
+  ref->base = get_ref_base_and_extent (ref->ref, &offset, &size,
+				       &max_size, &reverse);
+  ref->offset = offset;
+  ref->size = size;
+  ref->max_size = max_size;
   return ref->base;
 }
 
@@ -679,7 +683,8 @@ ao_ref_alias_set (ao_ref *ref)
 void
 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
 {
-  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
+  HOST_WIDE_INT t;
+  poly_int64 size_hwi, extra_offset = 0;
   ref->ref = NULL_TREE;
   if (TREE_CODE (ptr) == SSA_NAME)
     {
@@ -689,11 +694,10 @@ ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
	ptr = gimple_assign_rhs1 (stmt);
       else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
-	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
+	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
-	  extra_offset = BITS_PER_UNIT
-			 * int_cst_value (gimple_assign_rhs2 (stmt));
+	  extra_offset *= BITS_PER_UNIT;
	}
     }
 
@@ -717,8 +721,8 @@ ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
     }
   ref->offset += extra_offset;
   if (size
-      && tree_fits_shwi_p (size)
-      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
+      && poly_int_tree_p (size, &size_hwi)
+      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
     ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
   else
     ref->max_size = ref->size = -1;
@@ -779,11 +783,11 @@ static bool
 aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
-			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
-			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
+			   poly_int64 offset2, poly_int64 max_size2,
			   bool ref2_is_decl)
 {
   /* If one reference is a component references through pointers try to find a
@@ -825,7 +829,7 @@ aliasing_component_refs_p (tree ref1,
       offset2 -= offadj;
       get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
       offset1 -= offadj;
-      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+      return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
     }
   /* If we didn't find a common base, try the other way around.  */
   refp = &ref1;
@@ -844,7 +848,7 @@ aliasing_component_refs_p (tree ref1,
       offset1 -= offadj;
       get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
       offset2 -= offadj;
-      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+      return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
     }
 
   /* If we have two type access paths B1.path1 and B2.path2 they may
@@ -1090,9 +1094,9 @@ nonoverlapping_component_refs_p (const_tree x, const_tree y)
 
 static bool
 decl_refs_may_alias_p (tree ref1, tree base1,
-		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+		       poly_int64 offset1, poly_int64 max_size1,
		       tree ref2, tree base2,
-		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
+		       poly_int64 offset2, poly_int64 max_size2)
 {
   gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
 
@@ -1102,7 +1106,7 @@ decl_refs_may_alias_p (tree ref1, tree base1,
 
   /* If both references are based on the same variable, they cannot alias if
      the accesses do not overlap.  */
-  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
+  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
     return false;
 
   /* For components with variable position, the above test isn't sufficient,
@@ -1124,12 +1128,11 @@ decl_refs_may_alias_p (tree ref1, tree base1,
 
 static bool
 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
-			       HOST_WIDE_INT offset1,
-			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
+			       poly_int64 offset1, poly_int64 max_size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
-			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
+			       poly_int64 offset2, poly_int64 max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
 {
@@ -1185,14 +1188,15 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
      is bigger than the size of the decl we can't possibly access the
      decl via that pointer.  */
   if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
-      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
-      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
+      && poly_int_tree_p (DECL_SIZE (base2))
+      && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (ptrtype1)))
       /* ???  This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
       && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
       && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
-      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
+      && known_lt (wi::to_poly_widest (DECL_SIZE (base2)),
+		   wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ptrtype1)))))
     return false;
 
   if (!ref2)
@@ -1203,8 +1207,8 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
   dbase2 = ref2;
   while (handled_component_p (dbase2))
     dbase2 = TREE_OPERAND (dbase2, 0);
-  HOST_WIDE_INT doffset1 = offset1;
-  offset_int doffset2 = offset2;
+  poly_int64 doffset1 = offset1;
+  poly_offset_int doffset2 = offset2;
   if (TREE_CODE (dbase2) == MEM_REF
       || TREE_CODE (dbase2) == TARGET_MEM_REF)
     doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
@@ -1252,11 +1256,11 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
 
 static bool
 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
-			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+			   poly_int64 offset1, poly_int64 max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
-			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
+			   poly_int64 offset2, poly_int64 max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
 {
@@ -1330,7 +1334,7 @@ indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
	  /* But avoid treating arrays as "objects", instead assume they
	     can overlap by an exact multiple of their element size.  */
	  && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
-    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+    return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
 
   /* Do type-based disambiguation.  */
   if (base1_alias_set != base2_alias_set
@@ -1365,8 +1369,8 @@ bool
 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
 {
   tree base1, base2;
-  HOST_WIDE_INT offset1 = 0, offset2 = 0;
-  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
+  poly_int64 offset1 = 0, offset2 = 0;
+  poly_int64 max_size1 = -1, max_size2 = -1;
   bool var1_p, var2_p, ind1_p, ind2_p;
 
   gcc_checking_assert ((!ref1->ref
@@ -2442,14 +2446,17 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
	     handling constant offset and size.  */
	  /* For a must-alias check we need to be able to constrain
	     the access properly.  */
-	  if (ref->max_size == -1)
+	  if (!ref->max_size_known_p ())
	    return false;
-	  HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
+	  HOST_WIDE_INT size, max_size, const_offset;
+	  poly_int64 ref_offset = ref->offset;
	  bool reverse;
	  tree base
-	    = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
+	    = get_ref_base_and_extent (lhs, &const_offset, &size, &max_size,
+				       &reverse);
	  /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	     so base == ref->base does not always hold.  */
+	  poly_int64 offset = const_offset;
	  if (base != ref->base)
	    {
	      /* Try using points-to info.  */
@@ -2466,18 +2473,13 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
		  if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
					   TREE_OPERAND (ref->base, 1)))
		    {
-		      offset_int off1 = mem_ref_offset (base);
+		      poly_offset_int off1 = mem_ref_offset (base);
		      off1 <<= LOG2_BITS_PER_UNIT;
		      off1 += offset;
-		      offset_int off2 = mem_ref_offset (ref->base);
+		      poly_offset_int off2 = mem_ref_offset (ref->base);
		      off2 <<= LOG2_BITS_PER_UNIT;
		      off2 += ref_offset;
-		      if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
-			{
-			  offset = off1.to_shwi ();
-			  ref_offset = off2.to_shwi ();
-			}
-		      else
+		      if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
			size = -1;
		    }
		}
@@ -2486,12 +2488,9 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
	    }
	  /* For a must-alias check we need to be able to constrain
	     the access properly.  */
-	  if (size != -1 && size == max_size)
-	    {
-	      if (offset <= ref_offset
-		  && offset + size >= ref_offset + ref->max_size)
-		return true;
-	    }
+	  if (size == max_size
+	      && known_subrange_p (ref_offset, ref->max_size, offset, size))
+	    return true;
	}
 
   if (is_gimple_call (stmt))
@@ -2524,19 +2523,19 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
	  {
	    /* For a must-alias check we need to be able to constrain
	       the access properly.  */
-	    if (ref->max_size == -1)
+	    if (!ref->max_size_known_p ())
	      return false;
	    tree dest = gimple_call_arg (stmt, 0);
	    tree len = gimple_call_arg (stmt, 2);
-	    if (!tree_fits_shwi_p (len))
+	    if (!poly_int_tree_p (len))
	      return false;
	    tree rbase = ref->base;
-	    offset_int roffset = ref->offset;
+	    poly_offset_int roffset = ref->offset;
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, dest, len);
	    tree base = ao_ref_base (&dref);
-	    offset_int offset = dref.offset;
-	    if (!base || dref.size == -1)
+	    poly_offset_int offset = dref.offset;
+	    if (!base || !known_size_p (dref.size))
	      return false;
	    if (TREE_CODE (base) == MEM_REF)
	      {
@@ -2549,9 +2548,9 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
		rbase = TREE_OPERAND (rbase, 0);
	      }
	    if (base == rbase
-		&& offset <= roffset
-		&& (roffset + ref->max_size
-		    <= offset + (wi::to_offset (len) << LOG2_BITS_PER_UNIT)))
+		&& known_subrange_p (roffset, ref->max_size, offset,
+				     wi::to_poly_offset (len)
+				     << LOG2_BITS_PER_UNIT))
	      return true;
	    break;
	  }
diff --git a/gcc/tree-ssa-alias.h b/gcc/tree-ssa-alias.h
index c52ed3f..b6b23c9 100644
--- a/gcc/tree-ssa-alias.h
+++ b/gcc/tree-ssa-alias.h
@@ -80,11 +80,11 @@ struct ao_ref
      the following fields are not yet computed.  */
   tree base;
   /* The offset relative to the base.  */
-  HOST_WIDE_INT offset;
+  poly_int64 offset;
   /* The size of the access.  */
-  HOST_WIDE_INT size;
+  poly_int64 size;
   /* The maximum possible extent of the access or -1 if unconstrained.  */
-  HOST_WIDE_INT max_size;
+  poly_int64 max_size;
 
   /* The alias set of the access or -1 if not yet computed.  */
   alias_set_type ref_alias_set;
@@ -94,8 +94,18 @@ struct ao_ref
 
   /* Whether the memory is considered a volatile access.  */
   bool volatile_p;
+
+  bool max_size_known_p () const;
 };
 
+/* Return true if the maximum size is known, rather than the special -1
+   marker.  */
+
+inline bool
+ao_ref::max_size_known_p () const
+{
+  return known_size_p (max_size);
+}
 
 /* In tree-ssa-alias.c */
 extern void ao_ref_init (ao_ref *, tree);
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index 3b9e107..280356c 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -488,13 +488,9 @@ mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
-	  if (size != -1 && size == max_size
-	      && ref->max_size != -1)
-	    {
-	      if (offset <= ref->offset
-		  && offset + size >= ref->offset + ref->max_size)
-		return true;
-	    }
+	  if (size == max_size
+	      && known_subrange_p (ref->offset, ref->max_size, offset, size))
+	    return true;
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
index 4036f7d..392313b 100644
--- a/gcc/tree-ssa-dse.c
+++ b/gcc/tree-ssa-dse.c
@@ -128,13 +128,12 @@ static bool
 valid_ao_ref_for_dse (ao_ref *ref)
 {
   return (ao_ref_base (ref)
-	  && ref->max_size != -1
-	  && ref->size != 0
-	  && ref->max_size == ref->size
-	  && ref->offset >= 0
-	  && (ref->offset % BITS_PER_UNIT) == 0
-	  && (ref->size % BITS_PER_UNIT) == 0
-	  && (ref->size != -1));
+	  && known_size_p (ref->max_size)
+	  && maybe_ne (ref->size, 0)
+	  && known_eq (ref->max_size, ref->size)
+	  && known_ge (ref->offset, 0)
+	  && multiple_p (ref->offset, BITS_PER_UNIT)
+	  && multiple_p (ref->size, BITS_PER_UNIT));
 }
 
 /* Try to normalize COPY (an ao_ref) relative to REF.  Essentially when we are
@@ -144,25 +143,31 @@ valid_ao_ref_for_dse (ao_ref *ref)
 static bool
 normalize_ref (ao_ref *copy, ao_ref *ref)
 {
+  if (!ordered_p (copy->offset, ref->offset))
+    return false;
+
   /* If COPY starts before REF, then reset the beginning of
      COPY to match REF and decrease the size of COPY by the
      number of bytes removed from COPY.  */
-  if (copy->offset < ref->offset)
+  if (maybe_lt (copy->offset, ref->offset))
     {
-      HOST_WIDE_INT diff = ref->offset - copy->offset;
-      if (copy->size <= diff)
+      poly_int64 diff = ref->offset - copy->offset;
+      if (maybe_le (copy->size, diff))
	return false;
       copy->size -= diff;
       copy->offset = ref->offset;
     }
 
-  HOST_WIDE_INT diff = copy->offset - ref->offset;
-  if (ref->size <= diff)
+  poly_int64 diff = copy->offset - ref->offset;
+  if (maybe_le (ref->size, diff))
     return false;
 
   /* If COPY extends beyond REF, chop off its size appropriately.  */
-  HOST_WIDE_INT limit = ref->size - diff;
-  if (copy->size > limit)
+  poly_int64 limit = ref->size - diff;
+  if (!ordered_p (limit, copy->size))
+    return false;
+
+  if (maybe_gt (copy->size, limit))
     copy->size = limit;
   return true;
 }
@@ -183,15 +188,15 @@ clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
 
   /* Verify we have the same base memory address, the write
     has a known size and overlaps with REF.  */
+  HOST_WIDE_INT start, size;
   if (valid_ao_ref_for_dse (&write)
       && operand_equal_p (write.base, ref->base, OEP_ADDRESS_OF)
-      && write.size == write.max_size
-      && normalize_ref (&write, ref))
-    {
-      HOST_WIDE_INT start = write.offset - ref->offset;
-      bitmap_clear_range (live_bytes, start / BITS_PER_UNIT,
-			  write.size / BITS_PER_UNIT);
-    }
+      && known_eq (write.size, write.max_size)
+      && normalize_ref (&write, ref)
+      && (write.offset - ref->offset).is_constant (&start)
+      && write.size.is_constant (&size))
+    bitmap_clear_range (live_bytes, start / BITS_PER_UNIT,
+			size / BITS_PER_UNIT);
 }
 
 /* REF is a memory write.  Extract relevant information from it and
@@ -201,12 +206,14 @@ clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
 static bool
 setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
 {
+  HOST_WIDE_INT const_size;
   if (valid_ao_ref_for_dse (ref)
-      && (ref->size / BITS_PER_UNIT
+      && ref->size.is_constant (&const_size)
+      && (const_size / BITS_PER_UNIT
	  <= PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)))
     {
       bitmap_clear (live_bytes);
-      bitmap_set_range (live_bytes, 0, ref->size / BITS_PER_UNIT);
+      bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
       return true;
     }
   return false;
@@ -231,9 +238,15 @@ compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
      the REF to compute the trims.  */
 
   /* Now identify how much, if any of the tail we can chop off.  */
-  int last_orig = (ref->size / BITS_PER_UNIT) - 1;
-  int last_live = bitmap_last_set_bit (live);
-  *trim_tail = (last_orig - last_live) & ~0x1;
+  HOST_WIDE_INT const_size;
+  if (ref->size.is_constant (&const_size))
+    {
+      int last_orig = (const_size / BITS_PER_UNIT) - 1;
+      int last_live = bitmap_last_set_bit (live);
+      *trim_tail = (last_orig - last_live) & ~0x1;
+    }
+  else
+    *trim_tail = 0;
 
   /* Identify how much, if any of the head we can chop off.  */
   int first_orig = 0;
@@ -267,7 +280,7 @@ maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
      least half the size of the object to ensure we're trimming
     the entire real or imaginary half.  By writing things this
     way we avoid more O(n) bitmap operations.  */
-  if (trim_tail * 2 >= ref->size / BITS_PER_UNIT)
+  if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
     {
       /* TREE_REALPART is live */
       tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
@@ -276,7 +289,7 @@ maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
       gimple_assign_set_lhs (stmt, y);
       gimple_assign_set_rhs1 (stmt, x);
     }
-  else if (trim_head * 2 >= ref->size / BITS_PER_UNIT)
+  else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
     {
       /* TREE_IMAGPART is live */
       tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
@@ -326,7 +339,8 @@ maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
	return;
 
      /* The number of bytes for the new constructor.  */
-      int count = (ref->size / BITS_PER_UNIT) - head_trim - tail_trim;
+      poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
+      poly_int64 count = ref_bytes - head_trim - tail_trim;
 
      /* And the new type for the CONSTRUCTOR.  Essentially it's just
	 a char array large enough to cover the non-trimmed parts of
@@ -483,15 +497,15 @@ live_bytes_read (ao_ref use_ref, ao_ref *ref, sbitmap live)
 {
   /* We have already verified that USE_REF and REF hit the same object.
      Now verify that there's actually an overlap between USE_REF and REF.  */
-  if (normalize_ref (&use_ref, ref))
+  HOST_WIDE_INT start, size;
+  if (normalize_ref (&use_ref, ref)
+      && (use_ref.offset - ref->offset).is_constant (&start)
+      && use_ref.size.is_constant (&size))
     {
-      HOST_WIDE_INT start = use_ref.offset - ref->offset;
-      HOST_WIDE_INT size = use_ref.size;
-
       /* If USE_REF covers all of REF, then it will hit one or more
	 live bytes.   This avoids useless iteration over the bitmap
	 below.  */
-      if (start == 0 && size == ref->size)
+      if (start == 0 && known_eq (size, ref->size))
	return true;
 
       /* Now check if any of the remaining bits in use_ref are set in LIVE.  */
@@ -593,7 +607,7 @@ dse_classify_store (ao_ref *ref, gimple *stmt, gimple **use_stmt,
	      ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
	      if (valid_ao_ref_for_dse (&use_ref)
		  && use_ref.base == ref->base
-		  && use_ref.size == use_ref.max_size
+		  && known_eq (use_ref.size, use_ref.max_size)
		  && !live_bytes_read (use_ref, ref, live_bytes))
		{
		  /* If this statement has a VDEF, then it is the
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index b6f23f8..e3dbebd 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -560,7 +560,7 @@ vn_reference_compute_hash (const vn_reference_t vr1)
   hashval_t result;
   int i;
   vn_reference_op_t vro;
-  HOST_WIDE_INT off = -1;
+  poly_int64 off = -1;
   bool deref = false;
 
   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
@@ -569,17 +569,17 @@ vn_reference_compute_hash (const vn_reference_t vr1)
	deref = true;
       else if (vro->opcode != ADDR_EXPR)
	deref = false;
-      if (vro->off != -1)
+      if (maybe_ne (vro->off, -1))
	{
-	  if (off == -1)
+	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
       else
	{
-	  if (off != -1
-	      && off != 0)
-	    hstate.add_int (off);
+	  if (maybe_ne (off, -1)
+	      && maybe_ne (off, 0))
+	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
@@ -645,7 +645,7 @@ vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
   j = 0;
   do
     {
-      HOST_WIDE_INT off1 = 0, off2 = 0;
+      poly_int64 off1 = 0, off2 = 0;
       vn_reference_op_t vro1, vro2;
       vn_reference_op_s tem1, tem2;
       bool deref1 = false, deref2 = false;
@@ -656,7 +656,7 @@ vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
-	  if (vro1->off == -1)
+	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
@@ -667,11 +667,11 @@ vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
-	  if (vro2->off == -1)
+	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
-      if (off1 != off2)
+      if (maybe_ne (off1, off2))
	return false;
       if (deref1 && vro1->opcode == ADDR_EXPR)
	{
@@ -797,24 +797,23 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
-		&& TREE_CODE (this_offset) == INTEGER_CST)
+		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
-		    offset_int off
-		      = (wi::to_offset (this_offset)
+		    poly_offset_int off
+		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
-		    if (wi::fits_shwi_p (off)
-			/* Probibit value-numbering zero offset components
-			   of addresses the same before the pass folding
-			   __builtin_object_size had a chance to run
-			   (checking cfun->after_inlining does the
-			   trick here).  */
-			&& (TREE_CODE (orig) != ADDR_EXPR
-			    || off != 0
-			    || cfun->after_inlining))
-		      temp.off = off.to_shwi ();
+		    /* Probibit value-numbering zero offset components
+		       of addresses the same before the pass folding
+		       __builtin_object_size had a chance to run
+		       (checking cfun->after_inlining does the
+		       trick here).  */
+		    if (TREE_CODE (orig) != ADDR_EXPR
+			|| maybe_ne (off, 0)
+			|| cfun->after_inlining)
+		      off.to_shwi (&temp.off);
		  }
	      }
	  }
@@ -833,16 +832,15 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
	  if (! temp.op2)
	    temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				   size_int (TYPE_ALIGN_UNIT (eltype)));
-	  if (TREE_CODE (temp.op0) == INTEGER_CST
-	      && TREE_CODE (temp.op1) == INTEGER_CST
+	  if (poly_int_tree_p (temp.op0)
+	      && poly_int_tree_p (temp.op1)
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
-	      offset_int off = ((wi::to_offset (temp.op0)
-				 - wi::to_offset (temp.op1))
-				* wi::to_offset (temp.op2)
-				* vn_ref_op_align_unit (&temp));
-	      if (wi::fits_shwi_p (off))
-		temp.off = off.to_shwi();
+	      poly_offset_int off = ((wi::to_poly_offset (temp.op0)
+				      - wi::to_poly_offset (temp.op1))
+				     * wi::to_offset (temp.op2)
+				     * vn_ref_op_align_unit (&temp));
+	      off.to_shwi (&temp.off);
	    }
	  break;
 
@@ -929,9 +927,9 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
   unsigned i;
   tree base = NULL_TREE;
   tree *op0_p = &base;
-  offset_int offset = 0;
-  offset_int max_size;
-  offset_int size = -1;
+  poly_offset_int offset = 0;
+  poly_offset_int max_size;
+  poly_offset_int size = -1;
   tree size_tree = NULL_TREE;
   alias_set_type base_alias_set = -1;
 
@@ -947,11 +945,11 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
       if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
       else
-	size = int (GET_MODE_BITSIZE (mode));
+	size = GET_MODE_BITSIZE (mode);
     }
   if (size_tree != NULL_TREE
-      && TREE_CODE (size_tree) == INTEGER_CST)
-    size = wi::to_offset (size_tree);
+      && poly_int_tree_p (size_tree))
+    size = wi::to_poly_offset (size_tree);
 
   /* Initially, maxsize is the same as the accessed element size.
      In the following it will only grow (or become -1).  */
@@ -974,7 +972,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
-	      if (pop->off == -1)
+	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
@@ -1019,12 +1017,12 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);
 
-	    if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
+	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
-		offset_int woffset = (wi::to_offset (this_offset)
-				      << LOG2_BITS_PER_UNIT);
+		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
+					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
@@ -1034,14 +1032,15 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
-	  if (TREE_CODE (op->op0) != INTEGER_CST
-	      || TREE_CODE (op->op1) != INTEGER_CST
+	  if (!poly_int_tree_p (op->op0)
+	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
-	      offset_int woffset
-		= wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
+	      poly_offset_int woffset
+		= wi::sext (wi::to_poly_offset (op->op0)
+			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
@@ -1086,7 +1085,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
   /* We discount volatiles from value-numbering elsewhere.  */
   ref->volatile_p = false;
 
-  if (!wi::fits_shwi_p (size) || wi::neg_p (size))
+  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
     {
       ref->offset = 0;
       ref->size = -1;
@@ -1094,21 +1093,15 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
       return true;
     }
 
-  ref->size = size.to_shwi ();
-
-  if (!wi::fits_shwi_p (offset))
+  if (!offset.to_shwi (&ref->offset))
     {
       ref->offset = 0;
       ref->max_size = -1;
       return true;
     }
 
-  ref->offset = offset.to_shwi ();
-
-  if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
+  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
     ref->max_size = -1;
-  else
-    ref->max_size = max_size.to_shwi ();
 
   return true;
 }
@@ -1353,7 +1346,7 @@ fully_constant_vn_reference_p (vn_reference_t ref)
       && (!INTEGRAL_TYPE_P (ref->type)
	  || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
     {
-      HOST_WIDE_INT off = 0;
+      poly_int64 off = 0;
       HOST_WIDE_INT size;
       if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
@@ -1371,7 +1364,7 @@ fully_constant_vn_reference_p (vn_reference_t ref)
	      ++i;
	      break;
	    }
-	  if (operands[i].off == -1)
+	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
@@ -1401,6 +1394,7 @@ fully_constant_vn_reference_p (vn_reference_t ref)
	    return build_zero_cst (ref->type);
	  else if (ctor != error_mark_node)
	    {
+	      HOST_WIDE_INT const_off;
	      if (decl)
		{
		  tree res = fold_ctor_reference (ref->type, ctor,
@@ -1413,10 +1407,10 @@ fully_constant_vn_reference_p (vn_reference_t ref)
		      return res;
		    }
		}
-	      else
+	      else if (off.is_constant (&const_off))
		{
		  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
-		  int len = native_encode_expr (ctor, buf, size, off);
+		  int len = native_encode_expr (ctor, buf, size, const_off);
		  if (len > 0)
		    return native_interpret_expr (ref->type, buf, len);
		}
@@ -1508,17 +1502,16 @@ valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
	/* If it transforms a non-constant ARRAY_REF into a constant
	   one, adjust the constant offset.  */
	else if (vro->opcode == ARRAY_REF
-		 && vro->off == -1
-		 && TREE_CODE (vro->op0) == INTEGER_CST
-		 && TREE_CODE (vro->op1) == INTEGER_CST
+		 && known_eq (vro->off, -1)
+		 && poly_int_tree_p (vro->op0)
+		 && poly_int_tree_p (vro->op1)
		 && TREE_CODE (vro->op2) == INTEGER_CST)
	  {
-	    offset_int off = ((wi::to_offset (vro->op0)
-			       - wi::to_offset (vro->op1))
-			      * wi::to_offset (vro->op2)
-			      * vn_ref_op_align_unit (vro));
-	    if (wi::fits_shwi_p (off))
-	      vro->off = off.to_shwi ();
+	    poly_offset_int off = ((wi::to_poly_offset (vro->op0)
+				    - wi::to_poly_offset (vro->op1))
+				   * wi::to_offset (vro->op2)
+				   * vn_ref_op_align_unit (vro));
+	    off.to_shwi (&vro->off);
	  }
     }
 
@@ -1834,10 +1827,11 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
   vn_reference_t vr = (vn_reference_t)vr_;
   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
   tree base = ao_ref_base (ref);
-  HOST_WIDE_INT offset, maxsize;
+  HOST_WIDE_INT offseti, maxsizei;
   static vec<vn_reference_op_s> lhs_ops;
   ao_ref lhs_ref;
   bool lhs_ref_ok = false;
+  poly_int64 copy_size;
 
   /* If the reference is based on a parameter that was determined as
      pointing to readonly memory it doesn't change.  */
@@ -1949,14 +1943,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
   if (*disambiguate_only)
     return (void *)-1;
 
-  offset = ref->offset;
-  maxsize = ref->max_size;
-
   /* If we cannot constrain the size of the reference we cannot
      test if anything kills it.  */
-  if (maxsize == -1)
+  if (!ref->max_size_known_p ())
     return (void *)-1;
 
+  poly_int64 offset = ref->offset;
+  poly_int64 maxsize = ref->max_size;
+
   /* We can't deduce anything useful from clobbers.  */
   if (gimple_clobber_p (def_stmt))
     return (void *)-1;
@@ -1967,7 +1961,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
   if (is_gimple_reg_type (vr->type)
       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
       && integer_zerop (gimple_call_arg (def_stmt, 1))
-      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
+      && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
     {
       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
@@ -1976,13 +1970,11 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
       bool reverse;
       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
				       &reverse);
-      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
-      if ((unsigned HOST_WIDE_INT)size2 / 8
-	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
-	  && maxsize2 != -1
+      tree len = gimple_call_arg (def_stmt, 2);
+      if (known_size_p (maxsize2)
	  && operand_equal_p (base, base2, 0)
-	  && offset2 <= offset
-	  && offset2 + size2 >= offset + maxsize)
+	  && known_subrange_p (offset, maxsize, offset2,
+			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
@@ -2001,10 +1993,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
       bool reverse;
       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2, &reverse);
-      if (maxsize2 != -1
+      if (known_size_p (maxsize2)
	  && operand_equal_p (base, base2, 0)
-	  && offset2 <= offset
-	  && offset2 + size2 >= offset + maxsize)
+	  && known_subrange_p (offset, maxsize, offset2, size2))
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
@@ -2014,13 +2005,17 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
 
   /* 3) Assignment from a constant.  We can use folds native encode/interpret
      routines to extract the assigned bits.  */
-  else if (ref->size == maxsize
+  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
-	   && maxsize % BITS_PER_UNIT == 0
-	   && offset % BITS_PER_UNIT == 0
+	   /* native_encode and native_decode operate on arrays of bytes
+	      and so fundamentally need a compile-time size and offset.  */
+	   && maxsize.is_constant (&maxsizei)
+	   && maxsizei % BITS_PER_UNIT == 0
+	   && offset.is_constant (&offseti)
+	   && offseti % BITS_PER_UNIT == 0
	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
@@ -2036,8 +2031,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
-	  && offset2 <= offset
-	  && offset2 + size2 >= offset + maxsize)
+	  && known_subrange_p (offseti, maxsizei, offset2, size2))
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
@@ -2054,14 +2048,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	      /* Make sure to interpret in a type that has a range
		 covering the whole access size.  */
	      if (INTEGRAL_TYPE_P (vr->type)
-		  && ref->size != TYPE_PRECISION (vr->type))
-		type = build_nonstandard_integer_type (ref->size,
+		  && maxsizei != TYPE_PRECISION (vr->type))
+		type = build_nonstandard_integer_type (maxsizei,
						       TYPE_UNSIGNED (type));
	      tree val = native_interpret_expr (type,
						buffer
-						+ ((offset - offset2)
+						+ ((offseti - offset2)
						   / BITS_PER_UNIT),
-						ref->size / BITS_PER_UNIT);
+						maxsizei / BITS_PER_UNIT);
	      /* If we chop off bits because the types precision doesn't
		 match the memory access size this is ok when optimizing
		 reads but not when called from the DSE code during
@@ -2084,7 +2078,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
 
   /* 4) Assignment from an SSA name which definition we may be able
      to access pieces from.  */
-  else if (ref->size == maxsize
+  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
@@ -2100,15 +2094,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	  && maxsize2 != -1
	  && maxsize2 == size2
	  && operand_equal_p (base, base2, 0)
-	  && offset2 <= offset
-	  && offset2 + size2 >= offset + maxsize
+	  && known_subrange_p (offset, maxsize, offset2, size2)
	  /* ???  We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (vr->type)
-	      || ref->size == TYPE_PRECISION (vr->type))
-	  && ref->size % BITS_PER_UNIT == 0)
+	      || known_eq (ref->size, TYPE_PRECISION (vr->type)))
+	  && multiple_p (ref->size, BITS_PER_UNIT))
	{
	  code_helper rcode = BIT_FIELD_REF;
	  tree ops[3];
@@ -2136,7 +2129,6 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
     {
       tree base2;
-      HOST_WIDE_INT maxsize2;
       int i, j, k;
       auto_vec<vn_reference_op_s> rhs;
       vn_reference_op_t vro;
@@ -2147,8 +2139,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
 
       /* See if the assignment kills REF.  */
       base2 = ao_ref_base (&lhs_ref);
-      maxsize2 = lhs_ref.max_size;
-      if (maxsize2 == -1
+      if (!lhs_ref.max_size_known_p ()
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
@@ -2175,15 +2166,15 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
-      HOST_WIDE_INT extra_off = 0;
+      poly_int64 extra_off = 0;
       if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
-	  && lhs_ops[0].off != -1)
+	  && maybe_ne (lhs_ops[0].off, -1))
	{
-	  if (lhs_ops[0].off == vr->operands[i].off)
+	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
-		   && vr->operands[i].off != -1)
+		   && maybe_ne (vr->operands[i].off, -1))
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
@@ -2209,11 +2200,11 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
 
       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
-      if (extra_off != 0)
+      if (maybe_ne (extra_off, 0))
	{
	  if (rhs.length () < 2
	      || rhs[0].opcode != MEM_REF
-	      || rhs[0].off == -1)
+	      || known_eq (rhs[0].off, -1))
	    return (void *)-1;
	  rhs[0].off += extra_off;
	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
@@ -2244,7 +2235,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
       /* This can happen with bitfields.  */
-      if (ref->size != r.size)
+      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
       *ref = r;
 
@@ -2267,18 +2258,19 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
-	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
+	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
     {
       tree lhs, rhs;
       ao_ref r;
-      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
+      poly_int64 rhs_offset, lhs_offset;
       vn_reference_op_s op;
-      HOST_WIDE_INT at;
+      poly_uint64 mem_offset;
+      poly_int64 at, byte_maxsize;
 
       /* Only handle non-variable, addressable refs.  */
-      if (ref->size != maxsize
-	  || offset % BITS_PER_UNIT != 0
-	  || ref->size % BITS_PER_UNIT != 0)
+      if (maybe_ne (ref->size, maxsize)
+	  || !multiple_p (offset, BITS_PER_UNIT, &at)
+	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
	return (void *)-1;
 
       /* Extract a pointer base and an offset for the destination.  */
@@ -2297,17 +2289,19 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	}
       if (TREE_CODE (lhs) == ADDR_EXPR)
	{
+	  HOST_WIDE_INT tmp_lhs_offset;
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
-						    &lhs_offset);
+						    &tmp_lhs_offset);
+	  lhs_offset = tmp_lhs_offset;
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
-	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
+	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = SSA_VAL (lhs);
-	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
+	      lhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
@@ -2325,15 +2319,17 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	rhs = SSA_VAL (rhs);
       if (TREE_CODE (rhs) == ADDR_EXPR)
	{
+	  HOST_WIDE_INT tmp_rhs_offset;
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
-						    &rhs_offset);
+						    &tmp_rhs_offset);
+	  rhs_offset = tmp_rhs_offset;
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
-	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
+	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      rhs = TREE_OPERAND (tem, 0);
-	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
+	      rhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem)
		   || TREE_CODE (tem) == STRING_CST)
@@ -2345,16 +2341,13 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;
 
-      copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
-
       /* The bases of the destination and the references have to agree.  */
-      at = offset / BITS_PER_UNIT;
       if (TREE_CODE (base) == MEM_REF)
	{
	  if (TREE_OPERAND (base, 0) != lhs
-	      || !tree_fits_uhwi_p (TREE_OPERAND (base, 1)))
+	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
	    return (void *) -1;
-	  at += tree_to_uhwi (TREE_OPERAND (base, 1));
+	  at += mem_offset;
	}
       else if (!DECL_P (base)
	       || TREE_CODE (lhs) != ADDR_EXPR
@@ -2363,12 +2356,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
 
       /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
-      if (lhs_offset >= at + maxsize / BITS_PER_UNIT
-	  || lhs_offset + copy_size <= at)
+      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
	return NULL;
       /* And the access has to be contained within the memcpy destination.  */
-      if (lhs_offset > at
-	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
+      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
	return (void *)-1;
 
       /* Make room for 2 operands in the new reference.  */
@@ -2406,7 +2397,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
       /* This can happen with bitfields.  */
-      if (ref->size != r.size)
+      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
       *ref = r;
 
diff --git a/gcc/tree-ssa-sccvn.h b/gcc/tree-ssa-sccvn.h
index 38877bc..8308768 100644
--- a/gcc/tree-ssa-sccvn.h
+++ b/gcc/tree-ssa-sccvn.h
@@ -93,7 +93,7 @@ typedef struct vn_reference_op_struct
   /* For storing TYPE_ALIGN for array ref element size computation.  */
   unsigned align : 6;
   /* Constant offset this op adds or -1 if it is variable.  */
-  HOST_WIDE_INT off;
+  poly_int64_pod off;
   tree type;
   tree op0;
   tree op1;
diff --git a/gcc/tree-ssa-uninit.c b/gcc/tree-ssa-uninit.c
index b17b2b1..b921751 100644
--- a/gcc/tree-ssa-uninit.c
+++ b/gcc/tree-ssa-uninit.c
@@ -294,15 +294,15 @@ warn_uninitialized_vars (bool warn_possibly_uninitialized)
 
	      /* Do not warn if the access is fully outside of the
		 variable.  */
+	      poly_int64 decl_size;
	      if (DECL_P (base)
-		  && ref.size != -1
-		  && ((ref.max_size == ref.size
-		       && ref.offset + ref.size <= 0)
-		      || (ref.offset >= 0
+		  && known_size_p (ref.size)
+		  && ((known_eq (ref.max_size, ref.size)
+		       && known_le (ref.offset + ref.size, 0))
+		      || (known_ge (ref.offset, 0)
			  && DECL_SIZE (base)
-			  && TREE_CODE (DECL_SIZE (base)) == INTEGER_CST
-			  && compare_tree_int (DECL_SIZE (base),
-					       ref.offset) <= 0)))
+			  && poly_int_tree_p (DECL_SIZE (base), &decl_size)
+			  && known_le (decl_size, ref.offset))))
		continue;
 
	      /* Do not warn if the access is then used for a BIT_INSERT_EXPR.  */