aboutsummaryrefslogtreecommitdiff
path: root/gcc/builtins.c
diff options
context:
space:
mode:
authorGiuliano Belinassi <giuliano.belinassi@usp.br>2020-08-22 17:43:43 -0300
committerGiuliano Belinassi <giuliano.belinassi@usp.br>2020-08-22 17:43:43 -0300
commita926878ddbd5a98b272c22171ce58663fc04c3e0 (patch)
tree86af256e5d9a9c06263c00adc90e5fe348008c43 /gcc/builtins.c
parent542730f087133690b47e036dfd43eb0db8a650ce (diff)
parent07cbaed8ba7d1b6e4ab3a9f44175502a4e1ecdb1 (diff)
downloadgcc-devel/autopar_devel.zip
gcc-devel/autopar_devel.tar.gz
gcc-devel/autopar_devel.tar.bz2
Merge branch 'autopar_rebase2' into autopar_develdevel/autopar_devel
Quickly commit changes in the rebase branch.
Diffstat (limited to 'gcc/builtins.c')
-rw-r--r--gcc/builtins.c1017
1 files changed, 611 insertions, 406 deletions
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 53bae59..8845816 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -73,6 +73,9 @@ along with GCC; see the file COPYING3. If not see
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
+#include "gimple-ssa.h"
+#include "tree-ssa-live.h"
+#include "tree-outof-ssa.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
@@ -119,7 +122,7 @@ static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
-static rtx inline_expand_builtin_string_cmp (tree, rtx);
+static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
@@ -3227,20 +3230,18 @@ expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
}
/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
- bytes from constant string DATA + OFFSET and return it as target
- constant. */
+ bytes from bytes at DATA + OFFSET and return it reinterpreted as
+ a target constant. */
static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
scalar_int_mode mode)
{
- const char *str = (const char *) data;
+ /* The REPresentation pointed to by DATA need not be a nul-terminated
+ string but the caller guarantees it's large enough for MODE. */
+ const char *rep = (const char *) data;
- gcc_assert (offset >= 0
- && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
- <= strlen (str) + 1));
-
- return c_readstr (str + offset, mode);
+ return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
}
/* LEN specify length of the block of memcpy/memset operation.
@@ -3286,12 +3287,6 @@ determine_block_size (tree len, rtx len_rtx,
}
else if (range_type == VR_ANTI_RANGE)
{
- /* Anti range 0...N lets us to determine minimal size to N+1. */
- if (min == 0)
- {
- if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
- *min_size = max.to_uhwi () + 1;
- }
/* Code like
int n;
@@ -3301,7 +3296,7 @@ determine_block_size (tree len, rtx len_rtx,
Produce anti range allowing negative values of N. We still
can use the information and make a guess that N is not negative.
*/
- else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
+ if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
*probable_max_size = min.to_uhwi () - 1;
}
}
@@ -3310,6 +3305,214 @@ determine_block_size (tree len, rtx len_rtx,
GET_MODE_MASK (GET_MODE (len_rtx)));
}
+/* For an expression EXP issue an access warning controlled by option OPT
+ with access to a region SLEN bytes in size in the RANGE of sizes. */
+
+static bool
+warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
+ tree slen, bool access)
+{
+ bool warned = false;
+
+ if (access)
+ {
+ if (tree_int_cst_equal (range[0], range[1]))
+ warned = (func
+ ? warning_n (loc, opt, tree_to_uhwi (range[0]),
+ "%K%qD reading %E byte from a region of size %E",
+ "%K%qD reading %E bytes from a region of size %E",
+ exp, func, range[0], slen)
+ : warning_n (loc, opt, tree_to_uhwi (range[0]),
+ "%Kreading %E byte from a region of size %E",
+ "%Kreading %E bytes from a region of size %E",
+ exp, range[0], slen));
+ else if (tree_int_cst_sign_bit (range[1]))
+ {
+ /* Avoid printing the upper bound if it's invalid. */
+ warned = (func
+ ? warning_at (loc, opt,
+ "%K%qD reading %E or more bytes from a region "
+ "of size %E",
+ exp, func, range[0], slen)
+ : warning_at (loc, opt,
+ "%Kreading %E or more bytes from a region "
+ "of size %E",
+ exp, range[0], slen));
+ }
+ else
+ warned = (func
+ ? warning_at (loc, opt,
+ "%K%qD reading between %E and %E bytes from "
+ "a region of size %E",
+ exp, func, range[0], range[1], slen)
+ : warning_at (loc, opt,
+ "%Kreading between %E and %E bytes from "
+ "a region of size %E",
+ exp, range[0], range[1], slen));
+
+ return warned;
+ }
+
+ if (tree_int_cst_equal (range[0], range[1]))
+ warned = (func
+ ? warning_n (loc, opt, tree_to_uhwi (range[0]),
+ "%K%qD expecting %E byte in a region of size %E",
+ "%K%qD expecting %E bytes in a region of size %E",
+ exp, func, range[0], slen)
+ : warning_n (loc, opt, tree_to_uhwi (range[0]),
+ "%Kexpecting %E byte in a region of size %E",
+ "%Kexpecting %E bytes in a region of size %E",
+ exp, range[0], slen));
+ else if (tree_int_cst_sign_bit (range[1]))
+ {
+ /* Avoid printing the upper bound if it's invalid. */
+ warned = (func
+ ? warning_at (loc, opt,
+ "%K%qD expecting %E or more bytes in a region "
+ "of size %E",
+ exp, func, range[0], slen)
+ : warning_at (loc, opt,
+ "%Kexpecting %E or more bytes in a region "
+ "of size %E",
+ exp, range[0], slen));
+ }
+ else
+ warned = (func
+ ? warning_at (loc, opt,
+ "%K%qD expecting between %E and %E bytes in "
+ "a region of size %E",
+ exp, func, range[0], range[1], slen)
+ : warning_at (loc, opt,
+ "%Kexpecting between %E and %E bytes in "
+ "a region of size %E",
+ exp, range[0], range[1], slen));
+ return warned;
+}
+
+/* Issue an inform message describing the target of an access REF.
+ WRITE is set for a write access and clear for a read access. */
+
+static void
+inform_access (const access_ref &ref, bool write)
+{
+ if (!ref.ref)
+ return;
+
+ /* Convert offset range and avoid including a zero range since it isn't
+ necessarily meaningful. */
+ long long minoff = 0, maxoff = 0;
+ if (wi::fits_shwi_p (ref.offrng[0])
+ && wi::fits_shwi_p (ref.offrng[1]))
+ {
+ minoff = ref.offrng[0].to_shwi ();
+ maxoff = ref.offrng[1].to_shwi ();
+ }
+
+ /* Convert size range and always include it since all sizes are
+ meaningful. */
+ unsigned long long minsize = 0, maxsize = 0;
+ if (wi::fits_shwi_p (ref.sizrng[0])
+ && wi::fits_shwi_p (ref.sizrng[1]))
+ {
+ minsize = ref.sizrng[0].to_shwi ();
+ maxsize = ref.sizrng[1].to_shwi ();
+ }
+
+ char sizestr[80];
+ location_t loc;
+ tree allocfn = NULL_TREE;
+ if (TREE_CODE (ref.ref) == SSA_NAME)
+ {
+ gimple *stmt = SSA_NAME_DEF_STMT (ref.ref);
+ gcc_assert (is_gimple_call (stmt));
+ loc = gimple_location (stmt);
+ allocfn = gimple_call_fndecl (stmt);
+ if (!allocfn)
+ /* Handle calls through pointers to functions. */
+ allocfn = gimple_call_fn (stmt);
+
+ /* SIZRNG doesn't necessarily have the same range as the allocation
+ size determined by gimple_call_alloc_size (). */
+
+ if (minsize == maxsize)
+ sprintf (sizestr, "%llu", minsize);
+ else
+ sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
+
+ }
+ else
+ loc = DECL_SOURCE_LOCATION (ref.ref);
+
+ if (write)
+ {
+ if (DECL_P (ref.ref))
+ {
+ if (minoff == maxoff)
+ {
+ if (minoff == 0)
+ inform (loc, "destination object %qD", ref.ref);
+ else
+ inform (loc, "at offset %lli into destination object %qD",
+ minoff, ref.ref);
+ }
+ else
+ inform (loc, "at offset [%lli, %lli] into destination object %qD",
+ minoff, maxoff, ref.ref);
+ return;
+ }
+
+ if (minoff == maxoff)
+ {
+ if (minoff == 0)
+ inform (loc, "destination object of size %s allocated by %qE",
+ sizestr, allocfn);
+ else
+ inform (loc,
+ "at offset %lli into destination object of size %s "
+ "allocated by %qE", minoff, sizestr, allocfn);
+ }
+ else
+ inform (loc,
+ "at offset [%lli, %lli] into destination object of size %s "
+ "allocated by %qE",
+ minoff, maxoff, sizestr, allocfn);
+
+ return;
+ }
+
+ if (DECL_P (ref.ref))
+ {
+ if (minoff == maxoff)
+ {
+ if (minoff == 0)
+ inform (loc, "source object %qD", ref.ref);
+ else
+ inform (loc, "at offset %lli into source object %qD",
+ minoff, ref.ref);
+ }
+ else
+ inform (loc, "at offset [%lli, %lli] into source object %qD",
+ minoff, maxoff, ref.ref);
+ return;
+ }
+
+ if (minoff == maxoff)
+ {
+ if (minoff == 0)
+ inform (loc, "source object of size %s allocated by %qE",
+ sizestr, allocfn);
+ else
+ inform (loc,
+ "at offset %lli into source object of size %s "
+ "allocated by %qE", minoff, sizestr, allocfn);
+ }
+ else
+ inform (loc,
+ "at offset [%lli, %lli] into source object of size %s "
+ "allocated by %qE",
+ minoff, maxoff, sizestr, allocfn);
+}
+
/* Try to verify that the sizes and lengths of the arguments to a string
manipulation function given by EXP are within valid bounds and that
the operation does not lead to buffer overflow or read past the end.
@@ -3336,12 +3539,19 @@ determine_block_size (tree len, rtx len_rtx,
When DSTWRITE is null LEN is checked to verify that it doesn't exceed
SIZE_MAX.
+ ACCESS is true for accesses, false for simple size checks in calls
+ to functions that neither read from nor write to the region.
+
+ When nonnull, PAD points to a more detailed description of the access.
+
If the call is successfully verified as safe return true, otherwise
return false. */
bool
check_access (tree exp, tree, tree, tree dstwrite,
- tree maxread, tree srcstr, tree dstsize)
+ tree maxread, tree srcstr, tree dstsize,
+ bool access /* = true */,
+ const access_data *pad /* = NULL */)
{
int opt = OPT_Wstringop_overflow_;
@@ -3545,7 +3755,11 @@ check_access (tree exp, tree, tree, tree dstwrite,
exp, range[0], range[1],
dstsize));
if (warned)
- TREE_NO_WARNING (exp) = true;
+ {
+ TREE_NO_WARNING (exp) = true;
+ if (pad)
+ inform_access (pad->dst, true);
+ }
/* Return error when an overflow has been detected. */
return false;
@@ -3649,46 +3863,15 @@ check_access (tree exp, tree, tree, tree dstwrite,
if (TREE_NO_WARNING (exp))
return false;
- bool warned = false;
location_t loc = tree_nonartificial_location (exp);
loc = expansion_point_location_if_in_system_header (loc);
- if (tree_int_cst_equal (range[0], range[1]))
- warned = (func
- ? warning_n (loc, opt, tree_to_uhwi (range[0]),
- "%K%qD reading %E byte from a region of size %E",
- "%K%qD reading %E bytes from a region of size %E",
- exp, func, range[0], slen)
- : warning_n (loc, opt, tree_to_uhwi (range[0]),
- "%Kreading %E byte from a region of size %E",
- "%Kreading %E bytes from a region of size %E",
- exp, range[0], slen));
- else if (tree_int_cst_sign_bit (range[1]))
+ if (warn_for_access (loc, func, exp, opt, range, slen, access))
{
- /* Avoid printing the upper bound if it's invalid. */
- warned = (func
- ? warning_at (loc, opt,
- "%K%qD reading %E or more bytes from a region "
- "of size %E",
- exp, func, range[0], slen)
- : warning_at (loc, opt,
- "%Kreading %E or more bytes from a region "
- "of size %E",
- exp, range[0], slen));
+ TREE_NO_WARNING (exp) = true;
+ if (pad)
+ inform_access (pad->src, false);
}
- else
- warned = (func
- ? warning_at (loc, opt,
- "%K%qD reading between %E and %E bytes from "
- "a region of size %E",
- exp, func, range[0], range[1], slen)
- : warning_at (loc, opt,
- "%Kreading between %E and %E bytes from "
- "a region of size %E",
- exp, range[0], range[1], slen));
- if (warned)
- TREE_NO_WARNING (exp) = true;
-
return false;
}
@@ -3787,185 +3970,120 @@ gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
return wide_int_to_tree (sizetype, rng1[1]);
}
-/* Helper for compute_objsize. Returns the constant size of the DEST
- if it refers to a variable or field and sets *PDECL to the DECL and
- *POFF to zero. Otherwise returns null for other nodes. */
+/* Wrapper around the wide_int overload of get_range. Returns the same
+ result but accepts offset_int instead. */
-static tree
-addr_decl_size (tree dest, tree *pdecl, tree *poff)
+static bool
+get_range (tree x, signop sgn, offset_int r[2],
+ const vr_values *rvals /* = NULL */)
{
- if (TREE_CODE (dest) == ADDR_EXPR)
- dest = TREE_OPERAND (dest, 0);
-
- if (DECL_P (dest))
- {
- *pdecl = dest;
- *poff = integer_zero_node;
- if (tree size = DECL_SIZE_UNIT (dest))
- return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
- }
-
- if (TREE_CODE (dest) == COMPONENT_REF)
- {
- *pdecl = TREE_OPERAND (dest, 1);
- *poff = integer_zero_node;
- /* Only return constant sizes for now while callers depend on it. */
- if (tree size = component_ref_size (dest))
- return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
- }
+ wide_int wr[2];
+ if (!get_range (x, wr, rvals))
+ return false;
- return NULL_TREE;
+ r[0] = offset_int::from (wr[0], sgn);
+ r[1] = offset_int::from (wr[1], sgn);
+ return true;
}
-/* Helper to compute the size of the object referenced by the DEST
+/* Helper to compute the size of the object referenced by the PTR
expression which must have pointer type, using Object Size type
OSTYPE (only the least significant 2 bits are used).
- Returns an estimate of the size of the object represented as
- a sizetype constant if successful or NULL when the size cannot
- be determined.
- When the referenced object involves a non-constant offset in some
- range the returned value represents the largest size given the
- smallest non-negative offset in the range.
- If nonnull, sets *PDECL to the decl of the referenced subobject
- if it can be determined, or to null otherwise. Likewise, when
- POFF is nonnull *POFF is set to the offset into *PDECL.
+ On success, sets PREF->REF to the DECL of the referenced object
+ if it's unique, otherwise to null, PREF->OFFRNG to the range of
+ offsets into it, and PREF->SIZRNG to the range of sizes of
+ the object(s).
+ VISITED is used to avoid visiting the same PHI operand multiple
+ times, and, when nonnull, RVALS to determine range information.
+ Returns true on success, false when the size cannot be determined.
The function is intended for diagnostics and should not be used
to influence code generation or optimization. */
-tree
-compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
- tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
+static bool
+compute_objsize (tree ptr, int ostype, access_ref *pref,
+ bitmap *visited, const vr_values *rvals /* = NULL */)
{
- tree dummy_decl = NULL_TREE;
- if (!pdecl)
- pdecl = &dummy_decl;
+ const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
+ if (addr)
+ ptr = TREE_OPERAND (ptr, 0);
- tree dummy_off = NULL_TREE;
- if (!poff)
- poff = &dummy_off;
+ if (DECL_P (ptr))
+ {
+ /* Bail if the reference is to the pointer itself (as opposed
+ to what it points to). */
+ if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
+ return false;
- /* Only the two least significant bits are meaningful. */
- ostype &= 3;
+ tree size = decl_init_size (ptr, false);
+ if (!size || TREE_CODE (size) != INTEGER_CST)
+ return false;
- if (ostype)
- /* Except for overly permissive calls to memcpy and other raw
- memory functions with zero OSTYPE, detect the size from simple
- DECLs first to more reliably than compute_builtin_object_size
- set *PDECL and *POFF. */
- if (tree size = addr_decl_size (dest, pdecl, poff))
- return size;
+ pref->ref = ptr;
+ pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
+ return true;
+ }
- unsigned HOST_WIDE_INT size;
- if (compute_builtin_object_size (dest, ostype, &size, pdecl, poff))
- return build_int_cst (sizetype, size);
+ const tree_code code = TREE_CODE (ptr);
- if (TREE_CODE (dest) == SSA_NAME)
+ if (code == COMPONENT_REF)
{
- gimple *stmt = SSA_NAME_DEF_STMT (dest);
- if (is_gimple_call (stmt))
+ tree field = TREE_OPERAND (ptr, 1);
+
+ if (ostype == 0)
{
- /* If STMT is a call to an allocation function get the size
- from its argument(s). If successful, also set *PDECL to
- DEST for the caller to include in diagnostics. */
- if (tree size = gimple_call_alloc_size (stmt))
- {
- *pdecl = dest;
- *poff = integer_zero_node;
- return size;
- }
- return NULL_TREE;
- }
+ /* For raw memory functions like memcpy bail if the size
+ of the enclosing object cannot be determined. */
+ tree ref = TREE_OPERAND (ptr, 0);
+ if (!compute_objsize (ref, ostype, pref, visited, rvals)
+ || !pref->ref)
+ return false;
- if (!is_gimple_assign (stmt))
- return NULL_TREE;
+ /* Otherwise, use the size of the enclosing object and add
+ the offset of the member to the offset computed so far. */
+ tree offset = byte_position (field);
+ if (TREE_CODE (offset) != INTEGER_CST)
+ return false;
+ offset_int off = wi::to_offset (offset);
+ pref->offrng[0] += off;
+ pref->offrng[1] += off;
+ return true;
+ }
- dest = gimple_assign_rhs1 (stmt);
+ /* Bail if the reference is to the pointer itself (as opposed
+ to what it points to). */
+ if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
+ return false;
- tree_code code = gimple_assign_rhs_code (stmt);
- if (code == POINTER_PLUS_EXPR)
+ pref->ref = field;
+ /* Only return constant sizes for now while callers depend
+ on it. INT0LEN is true for interior zero-length arrays. */
+ bool int0len = false;
+ tree size = component_ref_size (ptr, &int0len);
+ if (int0len)
{
- /* compute_builtin_object_size fails for addresses with
- non-constant offsets. Try to determine the range of
- such an offset here and use it to adjust the constant
- size. */
- tree off = gimple_assign_rhs2 (stmt);
- if (TREE_CODE (off) == INTEGER_CST)
- {
- if (tree size = compute_objsize (dest, ostype, pdecl, poff))
- {
- wide_int wioff = wi::to_wide (off);
- wide_int wisiz = wi::to_wide (size);
-
- /* Ignore negative offsets for now. For others,
- use the lower bound as the most optimistic
- estimate of the (remaining) size. */
- if (wi::neg_p (wioff))
- ;
- else
- {
- if (*poff)
- {
- *poff = fold_convert (ptrdiff_type_node, *poff);
- off = fold_convert (ptrdiff_type_node, *poff);
- *poff = size_binop (PLUS_EXPR, *poff, off);
- }
- else
- *poff = off;
- if (wi::ltu_p (wioff, wisiz))
- return wide_int_to_tree (TREE_TYPE (size),
- wi::sub (wisiz, wioff));
- return size_zero_node;
- }
- }
- }
- else if (TREE_CODE (off) == SSA_NAME
- && INTEGRAL_TYPE_P (TREE_TYPE (off)))
- {
- wide_int min, max;
- enum value_range_kind rng = get_range_info (off, &min, &max);
-
- if (rng == VR_RANGE)
- if (tree size = compute_objsize (dest, ostype, pdecl, poff))
- {
- wide_int wisiz = wi::to_wide (size);
-
- /* Ignore negative offsets for now. For others,
- use the lower bound as the most optimistic
- estimate of the (remaining)size. */
- if (wi::neg_p (min) || wi::neg_p (max))
- ;
- else
- {
- /* FIXME: For now, since the offset is non-constant,
- clear *POFF to keep it from being "misused."
- Eventually *POFF will need to become a range that
- can be properly added to the outer offset if it
- too is one. */
- *poff = NULL_TREE;
- if (wi::ltu_p (min, wisiz))
- return wide_int_to_tree (TREE_TYPE (size),
- wi::sub (wisiz, min));
- return size_zero_node;
- }
- }
- }
+ pref->sizrng[0] = pref->sizrng[1] = 0;
+ return true;
}
- else if (code != ADDR_EXPR)
- return NULL_TREE;
- }
- /* Unless computing the largest size (for memcpy and other raw memory
- functions), try to determine the size of the object from its type. */
- if (!ostype)
- return NULL_TREE;
+ if (!size || TREE_CODE (size) != INTEGER_CST)
+ return false;
- if (TREE_CODE (dest) == ARRAY_REF
- || TREE_CODE (dest) == MEM_REF)
+ pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
+ return true;
+ }
+
+ if (code == ARRAY_REF || code == MEM_REF)
{
- tree ref = TREE_OPERAND (dest, 0);
+ tree ref = TREE_OPERAND (ptr, 0);
tree reftype = TREE_TYPE (ref);
- if (TREE_CODE (dest) == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
+ if (code == ARRAY_REF
+ && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
+ /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
+ of known bound. */
+ return false;
+
+ if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
{
/* Give up for MEM_REFs of vector types; those may be synthesized
from multiple assignments to consecutive data members. See PR
@@ -3974,158 +4092,178 @@ compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
MEM_REFs at the time they're created. */
reftype = TREE_TYPE (reftype);
if (TREE_CODE (reftype) == VECTOR_TYPE)
- return NULL_TREE;
+ return false;
}
- tree off = TREE_OPERAND (dest, 1);
- if (tree size = compute_objsize (ref, ostype, pdecl, poff))
- {
- /* If the declaration of the destination object is known
- to have zero size, return zero. */
- if (integer_zerop (size)
- && *pdecl && DECL_P (*pdecl)
- && *poff && integer_zerop (*poff))
- return size_zero_node;
-
- /* A valid offset into a declared object cannot be negative.
- A zero size with a zero "inner" offset is still zero size
- regardless of the "other" offset OFF. */
- if (*poff
- && ((integer_zerop (*poff) && integer_zerop (size))
- || (TREE_CODE (*poff) == INTEGER_CST
- && tree_int_cst_sgn (*poff) < 0)))
- return size_zero_node;
-
- wide_int offrng[2];
- if (!get_range (off, offrng, rvals))
- return NULL_TREE;
- /* Convert to the same precision to keep wide_int from "helpfully"
- crashing whenever it sees other arguments. */
- const unsigned sizprec = TYPE_PRECISION (sizetype);
- offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
- offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
+ if (!compute_objsize (ref, ostype, pref, visited, rvals))
+ return false;
- /* Adjust SIZE either up or down by the sum of *POFF and OFF
- above. */
- if (TREE_CODE (dest) == ARRAY_REF)
- {
- tree lowbnd = array_ref_low_bound (dest);
- if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
- {
- /* Adjust the offset by the low bound of the array
- domain (normally zero but 1 in Fortran). */
- unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
- offrng[0] -= lb;
- offrng[1] -= lb;
- }
+ offset_int orng[2];
+ tree off = TREE_OPERAND (ptr, 1);
+ if (!get_range (off, SIGNED, orng, rvals))
+ /* Fail unless the size of the object is zero. */
+ return pref->sizrng[0] == 0 && pref->sizrng[0] == pref->sizrng[1];
- /* Convert the array index into a byte offset. */
- tree eltype = TREE_TYPE (dest);
- tree tpsize = TYPE_SIZE_UNIT (eltype);
- if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
- {
- wide_int wsz = wi::to_wide (tpsize, offrng->get_precision ());
- offrng[0] *= wsz;
- offrng[1] *= wsz;
- }
- else
- return NULL_TREE;
+ if (TREE_CODE (ptr) == ARRAY_REF)
+ {
+ /* Convert the array index range determined above to a byte
+ offset. */
+ tree lowbnd = array_ref_low_bound (ptr);
+ if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
+ {
+ /* Adjust the index by the low bound of the array domain
+ (normally zero but 1 in Fortran). */
+ unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
+ orng[0] -= lb;
+ orng[1] -= lb;
}
- wide_int wisize = wi::to_wide (size);
+ tree eltype = TREE_TYPE (ptr);
+ tree tpsize = TYPE_SIZE_UNIT (eltype);
+ if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
+ return false;
+
+ offset_int sz = wi::to_offset (tpsize);
+ orng[0] *= sz;
+ orng[1] *= sz;
- if (!*poff)
+ if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
{
- /* If the "inner" offset is unknown and the "outer" offset
- is either negative or less than SIZE, return the size
- minus the offset. This may be overly optimistic in
- the first case if the inner offset happens to be less
- than the absolute value of the outer offset. */
- if (wi::neg_p (offrng[0]))
- return size;
- if (wi::ltu_p (offrng[0], wisize))
- return build_int_cst (sizetype, (wisize - offrng[0]).to_uhwi ());
- return size_zero_node;
+ /* Except for the permissive raw memory functions which
+ use the size of the whole object determined above,
+ use the size of the referenced array. */
+ pref->sizrng[0] = pref->offrng[0] + orng[0] + sz;
+ pref->sizrng[1] = pref->offrng[1] + orng[1] + sz;
}
+ }
- /* Convert to the same precision to keep wide_int from "helpfuly"
- crashing whenever it sees other argumments. */
- offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
- offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
+ pref->offrng[0] += orng[0];
+ pref->offrng[1] += orng[1];
- tree dstoff = *poff;
- if (integer_zerop (*poff))
- *poff = off;
- else if (!integer_zerop (off))
+ return true;
+ }
+
+ if (TREE_CODE (ptr) == SSA_NAME)
+ {
+ gimple *stmt = SSA_NAME_DEF_STMT (ptr);
+ if (is_gimple_call (stmt))
+ {
+ /* If STMT is a call to an allocation function get the size
+ from its argument(s). If successful, also set *PDECL to
+ PTR for the caller to include in diagnostics. */
+ wide_int wr[2];
+ if (gimple_call_alloc_size (stmt, wr, rvals))
{
- *poff = fold_convert (ptrdiff_type_node, *poff);
- off = fold_convert (ptrdiff_type_node, off);
- *poff = size_binop (PLUS_EXPR, *poff, off);
+ pref->ref = ptr;
+ pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
+ pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
+ return true;
}
+ return false;
+ }
- if (!wi::neg_p (offrng[0]))
- {
- if (TREE_CODE (size) != INTEGER_CST)
- return NULL_TREE;
+ /* TODO: Handle PHI. */
- /* Return the difference between the size and the offset
- or zero if the offset is greater. */
- wide_int wisize = wi::to_wide (size, sizprec);
- if (wi::ltu_p (wisize, offrng[0]))
- return size_zero_node;
+ if (!is_gimple_assign (stmt))
+ return false;
- return wide_int_to_tree (sizetype, wisize - offrng[0]);
- }
+ ptr = gimple_assign_rhs1 (stmt);
- wide_int dstoffrng[2];
- if (TREE_CODE (dstoff) == INTEGER_CST)
- dstoffrng[0] = dstoffrng[1] = wi::to_wide (dstoff);
- else if (TREE_CODE (dstoff) == SSA_NAME)
+ tree_code code = gimple_assign_rhs_code (stmt);
+ if (TREE_CODE (TREE_TYPE (ptr)) != POINTER_TYPE)
+ /* Avoid conversions from non-pointers. */
+ return false;
+
+ if (code == POINTER_PLUS_EXPR)
+ {
+ /* If the offset in the expression can be determined use
+ it to adjust the overall offset. Otherwise, set the overall
+ offset to the maximum. */
+ offset_int orng[2];
+ tree off = gimple_assign_rhs2 (stmt);
+ if (!get_range (off, SIGNED, orng, rvals)
+ || !wi::les_p (orng[0], orng[1]))
{
- enum value_range_kind rng
- = get_range_info (dstoff, dstoffrng, dstoffrng + 1);
- if (rng != VR_RANGE)
- return NULL_TREE;
+ orng[0] = wi::to_offset (TYPE_MIN_VALUE (ptrdiff_type_node));
+ orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
}
- else
- return NULL_TREE;
-
- dstoffrng[0] = wide_int::from (dstoffrng[0], sizprec, SIGNED);
- dstoffrng[1] = wide_int::from (dstoffrng[1], sizprec, SIGNED);
- if (!wi::neg_p (dstoffrng[0]))
- wisize += dstoffrng[0];
-
- offrng[1] += dstoffrng[1];
- if (wi::neg_p (offrng[1]))
- return size_zero_node;
-
- return wide_int_to_tree (sizetype, wisize);
+ pref->offrng[0] += orng[0];
+ pref->offrng[1] += orng[1];
}
+ else if (code != ADDR_EXPR)
+ return false;
- return NULL_TREE;
+ return compute_objsize (ptr, ostype, pref, visited, rvals);
}
- /* Try simple DECLs not handled above. */
- if (tree size = addr_decl_size (dest, pdecl, poff))
- return size;
-
- tree type = TREE_TYPE (dest);
- if (TREE_CODE (type) == POINTER_TYPE)
- type = TREE_TYPE (type);
-
+ tree type = TREE_TYPE (ptr);
type = TYPE_MAIN_VARIANT (type);
- if (TREE_CODE (dest) == ADDR_EXPR)
- dest = TREE_OPERAND (dest, 0);
+ if (TREE_CODE (ptr) == ADDR_EXPR)
+ ptr = TREE_OPERAND (ptr, 0);
if (TREE_CODE (type) == ARRAY_TYPE
- && !array_at_struct_end_p (dest))
+ && !array_at_struct_end_p (ptr))
{
if (tree size = TYPE_SIZE_UNIT (type))
- return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
+ return get_range (size, UNSIGNED, pref->sizrng, rvals);
}
- return NULL_TREE;
+ return false;
+}
+
+/* Convenience wrapper around the above. */
+
+static tree
+compute_objsize (tree ptr, int ostype, access_ref *pref,
+ const vr_values *rvals = NULL)
+{
+ bitmap visited = NULL;
+
+ bool success
+ = compute_objsize (ptr, ostype, pref, &visited, rvals);
+
+ if (visited)
+ BITMAP_FREE (visited);
+
+ if (!success)
+ return NULL_TREE;
+
+ if (pref->offrng[0] < 0)
+ {
+ if (pref->offrng[1] < 0)
+ return size_zero_node;
+
+ pref->offrng[0] = 0;
+ }
+
+ if (pref->sizrng[1] < pref->offrng[0])
+ return size_zero_node;
+
+ return wide_int_to_tree (sizetype, pref->sizrng[1] - pref->offrng[0]);
+}
+
+/* Transitional wrapper around the above. The function should be removed
+ once callers transition to one of the two above. */
+
+tree
+compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
+ tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
+{
+ /* Set the initial offsets to zero and size to negative to indicate
+ none has been computed yet. */
+ access_ref ref;
+ tree size = compute_objsize (ptr, ostype, &ref, rvals);
+ if (!size)
+ return NULL_TREE;
+
+ if (pdecl)
+ *pdecl = ref.ref;
+
+ if (poff)
+ *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
+
+ return size;
}
/* Helper to determine and check the sizes of the source and the destination
@@ -4142,11 +4280,12 @@ check_memop_access (tree exp, tree dest, tree src, tree size)
try to determine the size of the largest source and destination
object using type-0 Object Size regardless of the object size
type specified by the option. */
- tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
- tree dstsize = compute_objsize (dest, 0);
+ access_data data;
+ tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
+ tree dstsize = compute_objsize (dest, 0, &data.dst);
return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
- srcsize, dstsize);
+ srcsize, dstsize, true, &data);
}
/* Validate memchr arguments without performing any expansion.
@@ -4166,9 +4305,11 @@ expand_builtin_memchr (tree exp, rtx)
of the object. */
if (warn_stringop_overflow)
{
- tree size = compute_objsize (arg1, 0);
+ access_data data;
+ tree size = compute_objsize (arg1, 0, &data.src);
check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
- /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
+ /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE,
+ true, &data);
}
return NULL_RTX;
@@ -4265,7 +4406,6 @@ expand_builtin_memory_copy_args (tree dest, tree src, tree len,
rtx target, tree exp, memop_ret retmode,
bool might_overlap)
{
- const char *src_str;
unsigned int src_align = get_pointer_alignment (src);
unsigned int dest_align = get_pointer_alignment (dest);
rtx dest_mem, src_mem, dest_addr, len_rtx;
@@ -4297,24 +4437,29 @@ expand_builtin_memory_copy_args (tree dest, tree src, tree len,
len_rtx = expand_normal (len);
determine_block_size (len, len_rtx, &min_size, &max_size,
&probable_max_size);
- src_str = c_getstr (src);
-
- /* If SRC is a string constant and block move would be done by
- pieces, we can avoid loading the string from memory and only
- stored the computed constants. This works in the overlap
- (memmove) case as well because store_by_pieces just generates a
- series of stores of constants from the string constant returned
- by c_getstr(). */
- if (src_str
+
+ /* Try to get the byte representation of the constant SRC points to,
+ with its byte size in NBYTES. */
+ unsigned HOST_WIDE_INT nbytes;
+ const char *rep = getbyterep (src, &nbytes);
+
+ /* If the function's constant bound LEN_RTX is less than or equal
+ to the byte size of the representation of the constant argument,
+ and if block move would be done by pieces, we can avoid loading
+ the bytes from memory and only store the computed constant.
+ This works in the overlap (memmove) case as well because
+ store_by_pieces just generates a series of stores of constants
+ from the representation returned by getbyterep(). */
+ if (rep
&& CONST_INT_P (len_rtx)
- && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
+ && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
&& can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
- CONST_CAST (char *, src_str),
+ CONST_CAST (char *, rep),
dest_align, false))
{
dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
builtin_memcpy_read_str,
- CONST_CAST (char *, src_str),
+ CONST_CAST (char *, rep),
dest_align, false, retmode);
dest_mem = force_operand (XEXP (dest_mem, 0), target);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
@@ -4338,7 +4483,8 @@ expand_builtin_memory_copy_args (tree dest, tree src, tree len,
dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
expected_align, expected_size,
min_size, max_size, probable_max_size,
- use_mempcpy_call, &is_move_done, might_overlap);
+ use_mempcpy_call, &is_move_done,
+ might_overlap);
/* Bail out when a mempcpy call would be expanded as libcall and when
we have a target that provides a fast implementation
@@ -4443,10 +4589,11 @@ expand_builtin_strcat (tree exp)
just diagnose cases when the souce string is longer than
the destination object. */
- tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
+ access_data data;
+ tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
- destsize);
+ destsize, true, &data);
return NULL_RTX;
}
@@ -4467,9 +4614,11 @@ expand_builtin_strcpy (tree exp, rtx target)
if (warn_stringop_overflow)
{
- tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
+ access_data data;
+ tree destsize = compute_objsize (dest, warn_stringop_overflow - 1,
+ &data.dst);
check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
- src, destsize);
+ src, destsize, true, &data);
}
if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
@@ -4525,9 +4674,11 @@ expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
if (warn_stringop_overflow)
{
- tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
+ access_data data;
+ tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
+ &data.dst);
check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
- src, destsize);
+ src, destsize, true, &data);
}
/* If return value is ignored, transform stpcpy into strcpy. */
@@ -4547,7 +4698,7 @@ expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
because the latter will potentially produce pessimized code
when used to produce the return value. */
c_strlen_data lendata = { };
- if (!c_getstr (src, NULL)
+ if (!c_getstr (src)
|| !(len = c_strlen (src, 0, &lendata, 1)))
return expand_movstr (dst, src, target,
/*retmode=*/ RETURN_END_MINUS_ONE);
@@ -4636,10 +4787,12 @@ expand_builtin_stpncpy (tree exp, rtx)
if (!check_nul_terminated_array (exp, src, len))
return NULL_RTX;
+ access_data data;
/* The size of the destination object. */
- tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
+ tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
- check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
+ check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize,
+ true, &data);
return NULL_RTX;
}
@@ -4679,12 +4832,13 @@ check_strncat_sizes (tree exp, tree objsize)
/* Try to verify that the destination is big enough for the shortest
string. */
+ access_data data;
if (!objsize && warn_stringop_overflow)
{
/* If it hasn't been provided by __strncat_chk, try to determine
the size of the destination object into which the source is
being copied. */
- objsize = compute_objsize (dest, warn_stringop_overflow - 1);
+ objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
}
/* Add one for the terminating nul. */
@@ -4715,10 +4869,10 @@ check_strncat_sizes (tree exp, tree objsize)
&& tree_int_cst_lt (maxread, srclen)))
srclen = maxread;
- /* The number of bytes to write is LEN but check_access will also
+ /* The number of bytes to write is LEN but check_access will also
check SRCLEN if LEN's value isn't known. */
return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
- objsize);
+ objsize, true, &data);
}
/* Similar to expand_builtin_strcat, do some very basic size validation
@@ -4756,10 +4910,11 @@ expand_builtin_strncat (tree exp, rtx)
maxlen = lendata.maxbound;
}
+ access_data data;
/* Try to verify that the destination is big enough for the shortest
string. First try to determine the size of the destination object
into which the source is being copied. */
- tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
+ tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
/* Add one for the terminating nul. */
tree srclen = (maxlen
@@ -4789,8 +4944,8 @@ expand_builtin_strncat (tree exp, rtx)
&& tree_int_cst_lt (maxread, srclen)))
srclen = maxread;
- /* The number of bytes to write is SRCLEN. */
- check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
+ check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize,
+ true, &data);
return NULL_RTX;
}
@@ -4819,13 +4974,14 @@ expand_builtin_strncpy (tree exp, rtx target)
if (warn_stringop_overflow)
{
- tree destsize = compute_objsize (dest,
- warn_stringop_overflow - 1);
+ access_data data;
+ tree destsize = compute_objsize (dest, warn_stringop_overflow - 1,
+ &data.dst);
/* The number of bytes to write is LEN but check_access will also
check SLEN if LEN's value isn't known. */
check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
- destsize);
+ destsize, true, &data);
}
/* We must be passed a constant len and src parameter. */
@@ -5138,16 +5294,18 @@ expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
/* Diagnose calls where the specified length exceeds the size of either
object. */
- tree size = compute_objsize (arg1, 0);
+ access_data data;
+ tree size = compute_objsize (arg1, 0, &data.src);
no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
len, /*maxread=*/NULL_TREE, size,
- /*objsize=*/NULL_TREE);
+ /*objsize=*/NULL_TREE, true, &data);
if (no_overflow)
{
- size = compute_objsize (arg2, 0);
+ access_data data;
+ size = compute_objsize (arg2, 0, &data.src);
no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
len, /*maxread=*/NULL_TREE, size,
- /*objsize=*/NULL_TREE);
+ /*objsize=*/NULL_TREE, true, &data);
}
/* If the specified length exceeds the size of either object,
@@ -5161,7 +5319,7 @@ expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
if (!result_eq && fcode != BUILT_IN_BCMP)
{
- result = inline_expand_builtin_string_cmp (exp, target);
+ result = inline_expand_builtin_bytecmp (exp, target);
if (result)
return result;
}
@@ -5189,26 +5347,32 @@ expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
by_pieces_constfn constfn = NULL;
- const char *src_str = c_getstr (arg2);
- if (result_eq && src_str == NULL)
+ /* Try to get the byte representation of the constant ARG2 (or, only
+ when the function's result is used for equality to zero, ARG1)
+ points to, with its byte size in NBYTES. */
+ unsigned HOST_WIDE_INT nbytes;
+ const char *rep = getbyterep (arg2, &nbytes);
+ if (result_eq && rep == NULL)
{
- src_str = c_getstr (arg1);
- if (src_str != NULL)
+ /* For equality to zero the arguments are interchangeable. */
+ rep = getbyterep (arg1, &nbytes);
+ if (rep != NULL)
std::swap (arg1_rtx, arg2_rtx);
}
- /* If SRC is a string constant and block move would be done
- by pieces, we can avoid loading the string from memory
- and only stored the computed constants. */
- if (src_str
+ /* If the function's constant bound LEN_RTX is less than or equal
+ to the byte size of the representation of the constant argument,
+ and if block move would be done by pieces, we can avoid loading
+ the bytes from memory and only store the computed constant result. */
+ if (rep
&& CONST_INT_P (len_rtx)
- && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
+ && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
constfn = builtin_memcpy_read_str;
result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
TREE_TYPE (len), target,
result_eq, constfn,
- CONST_CAST (char *, src_str));
+ CONST_CAST (char *, rep));
if (result)
{
@@ -5247,7 +5411,7 @@ expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
/* Due to the performance benefit, always inline the calls first. */
rtx result = NULL_RTX;
- result = inline_expand_builtin_string_cmp (exp, target);
+ result = inline_expand_builtin_bytecmp (exp, target);
if (result)
return result;
@@ -5371,7 +5535,7 @@ expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
/* Due to the performance benefit, always inline the calls first. */
rtx result = NULL_RTX;
- result = inline_expand_builtin_string_cmp (exp, target);
+ result = inline_expand_builtin_bytecmp (exp, target);
if (result)
return result;
@@ -6513,6 +6677,27 @@ expand_expr_force_mode (tree exp, machine_mode mode)
rtx val;
machine_mode old_mode;
+ if (TREE_CODE (exp) == SSA_NAME
+ && TYPE_MODE (TREE_TYPE (exp)) != mode)
+ {
+ /* Undo argument promotion if possible, as combine might not
+ be able to do it later due to MEM_VOLATILE_P uses in the
+ patterns. */
+ gimple *g = get_gimple_for_ssa_name (exp);
+ if (g && gimple_assign_cast_p (g))
+ {
+ tree rhs = gimple_assign_rhs1 (g);
+ tree_code code = gimple_assign_rhs_code (g);
+ if (CONVERT_EXPR_CODE_P (code)
+ && TYPE_MODE (TREE_TYPE (rhs)) == mode
+ && INTEGRAL_TYPE_P (TREE_TYPE (exp))
+ && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
+ && (TYPE_PRECISION (TREE_TYPE (exp))
+ > TYPE_PRECISION (TREE_TYPE (rhs))))
+ exp = rhs;
+ }
+ }
+
val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
/* If VAL is promoted to a wider mode, convert it back to MODE. Take care
of CONST_INTs, where we know the old_mode only from the call argument. */
@@ -7583,18 +7768,18 @@ inline_string_cmp (rtx target, tree var_str, const char *const_str,
return result;
}
-/* Inline expansion a call to str(n)cmp, with result going to
- TARGET if that's convenient.
+/* Inline expansion of a call to str(n)cmp and memcmp, with result going
+ to TARGET if that's convenient.
If the call is not been inlined, return NULL_RTX. */
+
static rtx
-inline_expand_builtin_string_cmp (tree exp, rtx target)
+inline_expand_builtin_bytecmp (tree exp, rtx target)
{
tree fndecl = get_callee_fndecl (exp);
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
- unsigned HOST_WIDE_INT length = 0;
bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
- /* Do NOT apply this inlining expansion when optimizing for size or
+ /* Do NOT apply this inlining expansion when optimizing for size or
optimization level below 2. */
if (optimize < 2 || optimize_insn_for_size_p ())
return NULL_RTX;
@@ -7617,29 +7802,47 @@ inline_expand_builtin_string_cmp (tree exp, rtx target)
unsigned HOST_WIDE_INT len2 = 0;
unsigned HOST_WIDE_INT len3 = 0;
- const char *src_str1 = c_getstr (arg1, &len1);
- const char *src_str2 = c_getstr (arg2, &len2);
+ /* Get the object representation of the initializers of ARG1 and ARG2
+ as strings, provided they refer to constant objects, with their byte
+ sizes in LEN1 and LEN2, respectively. */
+ const char *bytes1 = getbyterep (arg1, &len1);
+ const char *bytes2 = getbyterep (arg2, &len2);
- /* If neither strings is constant string, the call is not qualify. */
- if (!src_str1 && !src_str2)
+ /* Fail if neither argument refers to an initialized constant. */
+ if (!bytes1 && !bytes2)
return NULL_RTX;
- /* For strncmp, if the length is not a const, not qualify. */
if (is_ncmp)
{
+ /* Fail if the memcmp/strncmp bound is not a constant. */
if (!tree_fits_uhwi_p (len3_tree))
return NULL_RTX;
- else
- len3 = tree_to_uhwi (len3_tree);
- }
- if (src_str1 != NULL)
- len1 = strnlen (src_str1, len1) + 1;
+ len3 = tree_to_uhwi (len3_tree);
- if (src_str2 != NULL)
- len2 = strnlen (src_str2, len2) + 1;
+ if (fcode == BUILT_IN_MEMCMP)
+ {
+ /* Fail if the memcmp bound is greater than the size of either
+ of the two constant objects. */
+ if ((bytes1 && len1 < len3)
+ || (bytes2 && len2 < len3))
+ return NULL_RTX;
+ }
+ }
- int const_str_n = 0;
+ if (fcode != BUILT_IN_MEMCMP)
+ {
+ /* For string functions (i.e., strcmp and strncmp) reduce LEN1
+ and LEN2 to the length of the nul-terminated string stored
+ in each. */
+ if (bytes1 != NULL)
+ len1 = strnlen (bytes1, len1) + 1;
+ if (bytes2 != NULL)
+ len2 = strnlen (bytes2, len2) + 1;
+ }
+
+ /* See inline_string_cmp. */
+ int const_str_n;
if (!len1)
const_str_n = 2;
else if (!len2)
@@ -7649,23 +7852,23 @@ inline_expand_builtin_string_cmp (tree exp, rtx target)
else
const_str_n = 2;
- gcc_checking_assert (const_str_n > 0);
- length = (const_str_n == 1) ? len1 : len2;
-
- if (is_ncmp && len3 < length)
- length = len3;
+ /* For strncmp only, compute the new bound as the smallest of
+ the lengths of the two strings (plus 1) and the bound provided
+ to the function. */
+ unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
+ if (is_ncmp && len3 < bound)
+ bound = len3;
- /* If the length of the comparision is larger than the threshold,
+ /* If the bound of the comparison is larger than the threshold,
do nothing. */
- if (length > (unsigned HOST_WIDE_INT)
- param_builtin_string_cmp_inline_length)
+ if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
return NULL_RTX;
machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
/* Now, start inline expansion the call. */
return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
- (const_str_n == 1) ? src_str1 : src_str2, length,
+ (const_str_n == 1) ? bytes1 : bytes2, bound,
const_str_n, mode);
}
@@ -7988,6 +8191,7 @@ expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
case BUILT_IN_BSWAP16:
case BUILT_IN_BSWAP32:
case BUILT_IN_BSWAP64:
+ case BUILT_IN_BSWAP128:
target = expand_builtin_bswap (target_mode, exp, target, subtarget);
if (target)
return target;
@@ -11704,6 +11908,7 @@ is_inexpensive_builtin (tree decl)
case BUILT_IN_BSWAP16:
case BUILT_IN_BSWAP32:
case BUILT_IN_BSWAP64:
+ case BUILT_IN_BSWAP128:
case BUILT_IN_CLZ:
case BUILT_IN_CLZIMAX:
case BUILT_IN_CLZL: