author     Richard Sandiford <richard.sandiford@linaro.org>    2017-12-20 12:55:27 +0000
committer  Richard Sandiford <rsandifo@gcc.gnu.org>            2017-12-20 12:55:27 +0000
commit     588db50c8cf2c2ea081b2be7c1c3bc5452cdef20 (patch)
tree       3ba8f3c264cdf88b5f924b21cfa9f2ac097744d5 /gcc/tree-dfa.c
parent     80d0198b739660305da17e12072578b7a99cb777 (diff)
poly_int: get_ref_base_and_extent
This patch changes the types of the bit offsets and sizes returned
by get_ref_base_and_extent to poly_int64.

There are some callers that can't sensibly operate on polynomial
offsets or handle cases where the offset and size aren't known
exactly.  This includes the IPA devirtualisation code (since there's
no defined way of having vtables at variable offsets) and some parts
of the DWARF code.  The patch therefore adds a helper function
get_ref_base_and_extent_hwi that either returns exact HOST_WIDE_INT
bit positions and sizes or returns a null base to indicate failure.

2017-12-20  Richard Sandiford  <richard.sandiford@linaro.org>
            Alan Hayward  <alan.hayward@arm.com>
            David Sherwood  <david.sherwood@arm.com>

gcc/
        * tree-dfa.h (get_ref_base_and_extent): Return the base, size and
        max_size as poly_int64_pods rather than HOST_WIDE_INTs.
        (get_ref_base_and_extent_hwi): Declare.
        * tree-dfa.c (get_ref_base_and_extent): Return the base, size and
        max_size as poly_int64_pods rather than HOST_WIDE_INTs.
        (get_ref_base_and_extent_hwi): New function.
        * cfgexpand.c (expand_debug_expr): Update call to
        get_ref_base_and_extent.
        * dwarf2out.c (add_var_loc_to_decl): Likewise.
        * gimple-fold.c (get_base_constructor): Return the offset as a
        poly_int64_pod rather than a HOST_WIDE_INT.
        (fold_const_aggregate_ref_1): Track polynomial sizes and offsets.
        * ipa-polymorphic-call.c
        (ipa_polymorphic_call_context::set_by_invariant)
        (extr_type_from_vtbl_ptr_store): Track polynomial offsets.
        (ipa_polymorphic_call_context::ipa_polymorphic_call_context)
        (check_stmt_for_type_change): Use get_ref_base_and_extent_hwi
        rather than get_ref_base_and_extent.
        (ipa_polymorphic_call_context::get_dynamic_type): Likewise.
        * ipa-prop.c (ipa_load_from_parm_agg)
        (compute_complex_assign_jump_func, get_ancestor_addr_info)
        (determine_locally_known_aggregate_parts): Likewise.
        * ipa-param-manipulation.c (ipa_get_adjustment_candidate): Update
        call to get_ref_base_and_extent.
        * tree-sra.c (create_access, get_access_for_expr): Likewise.
        * tree-ssa-alias.c (ao_ref_base, aliasing_component_refs_p)
        (stmt_kills_ref_p): Likewise.
        * tree-ssa-dce.c (mark_aliased_reaching_defs_necessary_1):
        Likewise.
        * tree-ssa-scopedtables.c (avail_expr_hash, equal_mem_array_ref_p):
        Likewise.
        * tree-ssa-sccvn.c (vn_reference_lookup_3): Likewise.  Use
        get_ref_base_and_extent_hwi rather than get_ref_base_and_extent
        when calling native_encode_expr.
        * tree-ssa-structalias.c (get_constraint_for_component_ref): Update
        call to get_ref_base_and_extent.
        (do_structure_copy): Use get_ref_base_and_extent_hwi rather than
        get_ref_base_and_extent.
        * var-tracking.c (track_expr_p): Likewise.

Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>

From-SVN: r255886
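The division of labour between the two interfaces can be illustrated by the
following sketch.  It is not part of this commit: the helpers
same_constant_access_p and covers_whole_decl_p are hypothetical callers
written for illustration, and the include list follows the usual GCC
conventions and may need adjusting for a real source file.  A pass that only
cares about constant-width accesses at constant offsets calls the new
get_ref_base_and_extent_hwi wrapper and treats a null base as failure; a pass
that can cope with polynomial values keeps calling get_ref_base_and_extent and
checks the poly_int64 results explicitly.

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "fold-const.h"
#include "tree-dfa.h"

/* Hypothetical helper, for illustration only: return true if REF1 and
   REF2 access the same base object at the same constant bit offset
   with the same constant bit size.  Any access with a variable or
   unknown offset or size makes get_ref_base_and_extent_hwi return a
   null base, so we simply answer false for it.  */

static bool
same_constant_access_p (tree ref1, tree ref2)
{
  HOST_WIDE_INT off1, size1, off2, size2;
  bool rev1, rev2;
  tree base1 = get_ref_base_and_extent_hwi (ref1, &off1, &size1, &rev1);
  tree base2 = get_ref_base_and_extent_hwi (ref2, &off2, &size2, &rev2);
  if (!base1 || !base2)
    return false;
  return (operand_equal_p (base1, base2, 0)
	  && off1 == off2
	  && size1 == size2
	  && rev1 == rev2);
}

/* Hypothetical helper, for illustration only: return true if REF is
   known to cover the whole of a declared base object, even when the
   offset and sizes are only known as poly_int64s.  The caller works
   directly on the polynomial values, using known_size_p to detect the
   "unknown maximum size" marker and known_eq for must-be-equal tests.  */

static bool
covers_whole_decl_p (tree ref)
{
  poly_int64 offset, size, max_size, decl_size;
  bool reverse;
  tree base = get_ref_base_and_extent (ref, &offset, &size, &max_size,
				       &reverse);
  return (DECL_P (base)
	  && known_size_p (max_size)
	  && known_eq (size, max_size)
	  && known_eq (offset, 0)
	  && DECL_SIZE (base)
	  && poly_int_tree_p (DECL_SIZE (base), &decl_size)
	  && known_eq (size, decl_size));
}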
Diffstat (limited to 'gcc/tree-dfa.c')
-rw-r--r--   gcc/tree-dfa.c   144
1 file changed, 84 insertions(+), 60 deletions(-)
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index db69bda..e50bda0 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -377,15 +377,15 @@ get_or_create_ssa_default_def (struct function *fn, tree var)
true, the storage order of the reference is reversed. */
tree
-get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
- HOST_WIDE_INT *psize,
- HOST_WIDE_INT *pmax_size,
+get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
+ poly_int64_pod *psize,
+ poly_int64_pod *pmax_size,
bool *preverse)
{
- offset_int bitsize = -1;
- offset_int maxsize;
+ poly_offset_int bitsize = -1;
+ poly_offset_int maxsize;
tree size_tree = NULL_TREE;
- offset_int bit_offset = 0;
+ poly_offset_int bit_offset = 0;
bool seen_variable_array_ref = false;
/* First get the final access size and the storage order from just the
@@ -400,11 +400,11 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
if (mode == BLKmode)
size_tree = TYPE_SIZE (TREE_TYPE (exp));
else
- bitsize = int (GET_MODE_BITSIZE (mode));
+ bitsize = GET_MODE_BITSIZE (mode);
}
if (size_tree != NULL_TREE
- && TREE_CODE (size_tree) == INTEGER_CST)
- bitsize = wi::to_offset (size_tree);
+ && poly_int_tree_p (size_tree))
+ bitsize = wi::to_poly_offset (size_tree);
*preverse = reverse_storage_order_for_component_p (exp);
@@ -419,7 +419,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
switch (TREE_CODE (exp))
{
case BIT_FIELD_REF:
- bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
+ bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
break;
case COMPONENT_REF:
@@ -427,10 +427,10 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
tree field = TREE_OPERAND (exp, 1);
tree this_offset = component_ref_field_offset (exp);
- if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
+ if (this_offset && poly_int_tree_p (this_offset))
{
- offset_int woffset = (wi::to_offset (this_offset)
- << LOG2_BITS_PER_UNIT);
+ poly_offset_int woffset = (wi::to_poly_offset (this_offset)
+ << LOG2_BITS_PER_UNIT);
woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
bit_offset += woffset;
@@ -438,7 +438,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
referenced the last field of a struct or a union member
then we have to adjust maxsize by the padding at the end
of our field. */
- if (seen_variable_array_ref && maxsize != -1)
+ if (seen_variable_array_ref && known_size_p (maxsize))
{
tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
tree next = DECL_CHAIN (field);
@@ -450,14 +450,15 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
tree fsize = DECL_SIZE_UNIT (field);
tree ssize = TYPE_SIZE_UNIT (stype);
if (fsize == NULL
- || TREE_CODE (fsize) != INTEGER_CST
+ || !poly_int_tree_p (fsize)
|| ssize == NULL
- || TREE_CODE (ssize) != INTEGER_CST)
+ || !poly_int_tree_p (ssize))
maxsize = -1;
else
{
- offset_int tem = (wi::to_offset (ssize)
- - wi::to_offset (fsize));
+ poly_offset_int tem
+ = (wi::to_poly_offset (ssize)
+ - wi::to_poly_offset (fsize));
tem <<= LOG2_BITS_PER_UNIT;
tem -= woffset;
maxsize += tem;
@@ -471,10 +472,10 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
/* We need to adjust maxsize to the whole structure bitsize.
But we can subtract any constant offset seen so far,
because that would get us out of the structure otherwise. */
- if (maxsize != -1
+ if (known_size_p (maxsize)
&& csize
- && TREE_CODE (csize) == INTEGER_CST)
- maxsize = wi::to_offset (csize) - bit_offset;
+ && poly_int_tree_p (csize))
+ maxsize = wi::to_poly_offset (csize) - bit_offset;
else
maxsize = -1;
}
@@ -488,14 +489,15 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
tree low_bound, unit_size;
/* If the resulting bit-offset is constant, track it. */
- if (TREE_CODE (index) == INTEGER_CST
+ if (poly_int_tree_p (index)
&& (low_bound = array_ref_low_bound (exp),
- TREE_CODE (low_bound) == INTEGER_CST)
+ poly_int_tree_p (low_bound))
&& (unit_size = array_ref_element_size (exp),
TREE_CODE (unit_size) == INTEGER_CST))
{
- offset_int woffset
- = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
+ poly_offset_int woffset
+ = wi::sext (wi::to_poly_offset (index)
+ - wi::to_poly_offset (low_bound),
TYPE_PRECISION (TREE_TYPE (index)));
woffset *= wi::to_offset (unit_size);
woffset <<= LOG2_BITS_PER_UNIT;
@@ -512,10 +514,10 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
/* We need to adjust maxsize to the whole array bitsize.
But we can subtract any constant offset seen so far,
because that would get us outside of the array otherwise. */
- if (maxsize != -1
+ if (known_size_p (maxsize)
&& asize
- && TREE_CODE (asize) == INTEGER_CST)
- maxsize = wi::to_offset (asize) - bit_offset;
+ && poly_int_tree_p (asize))
+ maxsize = wi::to_poly_offset (asize) - bit_offset;
else
maxsize = -1;
@@ -560,11 +562,12 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
base type boundary. This needs to include possible trailing
padding that is there for alignment purposes. */
if (seen_variable_array_ref
- && maxsize != -1
+ && known_size_p (maxsize)
&& (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
- || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
- || (bit_offset + maxsize
- == wi::to_offset (TYPE_SIZE (TREE_TYPE (exp))))))
+ || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
+ || (maybe_eq
+ (bit_offset + maxsize,
+ wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
maxsize = -1;
/* Hand back the decl for MEM[&decl, off]. */
@@ -574,12 +577,13 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
else
{
- offset_int off = mem_ref_offset (exp);
+ poly_offset_int off = mem_ref_offset (exp);
off <<= LOG2_BITS_PER_UNIT;
off += bit_offset;
- if (wi::fits_shwi_p (off))
+ poly_int64 off_hwi;
+ if (off.to_shwi (&off_hwi))
{
- bit_offset = off;
+ bit_offset = off_hwi;
exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
}
}
@@ -594,7 +598,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
}
done:
- if (!wi::fits_shwi_p (bitsize) || wi::neg_p (bitsize))
+ if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
{
*poffset = 0;
*psize = -1;
@@ -603,9 +607,10 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
return exp;
}
- *psize = bitsize.to_shwi ();
-
- if (!wi::fits_shwi_p (bit_offset))
+ /* ??? Due to negative offsets in ARRAY_REF we can end up with
+ negative bit_offset here. We might want to store a zero offset
+ in this case. */
+ if (!bit_offset.to_shwi (poffset))
{
*poffset = 0;
*pmax_size = -1;
@@ -625,44 +630,37 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
|| (seen_variable_array_ref
&& (sz_tree == NULL_TREE
- || TREE_CODE (sz_tree) != INTEGER_CST
- || (bit_offset + maxsize == wi::to_offset (sz_tree)))))
+ || !poly_int_tree_p (sz_tree)
+ || maybe_eq (bit_offset + maxsize,
+ wi::to_poly_offset (sz_tree)))))
maxsize = -1;
}
/* If maxsize is unknown adjust it according to the size of the
base decl. */
- else if (maxsize == -1
- && DECL_SIZE (exp)
- && TREE_CODE (DECL_SIZE (exp)) == INTEGER_CST)
- maxsize = wi::to_offset (DECL_SIZE (exp)) - bit_offset;
+ else if (!known_size_p (maxsize)
+ && DECL_SIZE (exp)
+ && poly_int_tree_p (DECL_SIZE (exp)))
+ maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
}
else if (CONSTANT_CLASS_P (exp))
{
/* If maxsize is unknown adjust it according to the size of the
base type constant. */
- if (maxsize == -1
+ if (!known_size_p (maxsize)
&& TYPE_SIZE (TREE_TYPE (exp))
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
- maxsize = (wi::to_offset (TYPE_SIZE (TREE_TYPE (exp)))
+ && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
+ maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
- bit_offset);
}
- /* ??? Due to negative offsets in ARRAY_REF we can end up with
- negative bit_offset here. We might want to store a zero offset
- in this case. */
- *poffset = bit_offset.to_shwi ();
- if (!wi::fits_shwi_p (maxsize) || wi::neg_p (maxsize))
+ if (!maxsize.to_shwi (pmax_size)
+ || maybe_lt (*pmax_size, 0)
+ || !endpoint_representable_p (*poffset, *pmax_size))
*pmax_size = -1;
- else
- {
- *pmax_size = maxsize.to_shwi ();
- if (*poffset > HOST_WIDE_INT_MAX - *pmax_size)
- *pmax_size = -1;
- }
/* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
check for such overflows individually and assume it works. */
- if (*psize != -1 && *poffset > HOST_WIDE_INT_MAX - *psize)
+ if (!endpoint_representable_p (*poffset, *psize))
{
*poffset = 0;
*psize = -1;
@@ -674,6 +672,32 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
return exp;
}
+/* Like get_ref_base_and_extent, but for cases in which we only care
+ about constant-width accesses at constant offsets. Return null
+ if the access is anything else. */
+
+tree
+get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
+ HOST_WIDE_INT *psize, bool *preverse)
+{
+ poly_int64 offset, size, max_size;
+ HOST_WIDE_INT const_offset, const_size;
+ bool reverse;
+ tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
+ &reverse);
+ if (!offset.is_constant (&const_offset)
+ || !size.is_constant (&const_size)
+ || const_offset < 0
+ || !known_size_p (max_size)
+ || maybe_ne (max_size, const_size))
+ return NULL_TREE;
+
+ *poffset = const_offset;
+ *psize = const_size;
+ *preverse = reverse;
+ return decl;
+}
+
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
denotes the starting address of the memory access EXP.
Returns NULL_TREE if the offset is not constant or any component