Diffstat (limited to 'gcc/builtins.c')
-rw-r--r-- | gcc/builtins.c | 4203
1 file changed, 390 insertions, 3813 deletions
diff --git a/gcc/builtins.c b/gcc/builtins.c index 196dda3..3e57eb0 100644 --- a/gcc/builtins.c +++ b/gcc/builtins.c @@ -43,7 +43,7 @@ along with GCC; see the file COPYING3. If not see #include "alias.h" #include "fold-const.h" #include "fold-const-call.h" -#include "gimple-ssa-warn-restrict.h" +#include "gimple-ssa-warn-access.h" #include "stor-layout.h" #include "calls.h" #include "varasm.h" @@ -79,6 +79,8 @@ along with GCC; see the file COPYING3. If not see #include "tree-outof-ssa.h" #include "attr-fnspec.h" #include "demangle.h" +#include "gimple-range.h" +#include "pointer-query.h" struct target_builtins default_target_builtins; #if SWITCHABLE_TARGET @@ -128,8 +130,6 @@ static rtx expand_builtin_va_copy (tree); static rtx inline_expand_builtin_bytecmp (tree, rtx); static rtx expand_builtin_strcmp (tree, rtx); static rtx expand_builtin_strncmp (tree, rtx, machine_mode); -static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode); -static rtx expand_builtin_memchr (tree, rtx); static rtx expand_builtin_memcpy (tree, rtx); static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len, rtx target, tree exp, @@ -138,15 +138,10 @@ static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len, static rtx expand_builtin_memmove (tree, rtx); static rtx expand_builtin_mempcpy (tree, rtx); static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret); -static rtx expand_builtin_strcat (tree); static rtx expand_builtin_strcpy (tree, rtx); static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx); static rtx expand_builtin_stpcpy (tree, rtx, machine_mode); -static rtx expand_builtin_stpncpy (tree, rtx); -static rtx expand_builtin_strncat (tree, rtx); static rtx expand_builtin_strncpy (tree, rtx); -static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode); -static rtx expand_builtin_memset (tree, rtx, machine_mode); static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree); static rtx expand_builtin_bzero (tree); static rtx expand_builtin_strlen (tree, rtx, machine_mode); @@ -185,9 +180,6 @@ static rtx expand_builtin_memory_chk (tree, rtx, machine_mode, static void maybe_emit_chk_warning (tree, enum built_in_function); static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function); static tree fold_builtin_object_size (tree, tree); -static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1); -static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &, - pointer_query *); unsigned HOST_WIDE_INT target_newline; unsigned HOST_WIDE_INT target_percent; @@ -200,553 +192,14 @@ static tree do_mpfr_remquo (tree, tree, tree); static tree do_mpfr_lgamma_r (tree, tree, tree); static void expand_builtin_sync_synchronize (void); -access_ref::access_ref (tree bound /* = NULL_TREE */, - bool minaccess /* = false */) -: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true), - base0 (true), parmarray () -{ - /* Set to valid. */ - offrng[0] = offrng[1] = 0; - /* Invalidate. */ - sizrng[0] = sizrng[1] = -1; - - /* Set the default bounds of the access and adjust below. */ - bndrng[0] = minaccess ? 1 : 0; - bndrng[1] = HOST_WIDE_INT_M1U; - - /* When BOUND is nonnull and a range can be extracted from it, - set the bounds of the access to reflect both it and MINACCESS. - BNDRNG[0] is the size of the minimum access. 
*/ - tree rng[2]; - if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO)) - { - bndrng[0] = wi::to_offset (rng[0]); - bndrng[1] = wi::to_offset (rng[1]); - bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0; - } -} - -/* Return the PHI node REF refers to or null if it doesn't. */ - -gphi * -access_ref::phi () const -{ - if (!ref || TREE_CODE (ref) != SSA_NAME) - return NULL; - - gimple *def_stmt = SSA_NAME_DEF_STMT (ref); - if (gimple_code (def_stmt) != GIMPLE_PHI) - return NULL; - - return as_a <gphi *> (def_stmt); -} - -/* Determine and return the largest object to which *THIS. If *THIS - refers to a PHI and PREF is nonnull, fill *PREF with the details - of the object determined by compute_objsize(ARG, OSTYPE) for each - PHI argument ARG. */ - -tree -access_ref::get_ref (vec<access_ref> *all_refs, - access_ref *pref /* = NULL */, - int ostype /* = 1 */, - ssa_name_limit_t *psnlim /* = NULL */, - pointer_query *qry /* = NULL */) const -{ - gphi *phi_stmt = this->phi (); - if (!phi_stmt) - return ref; - - /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might - cause unbounded recursion. */ - ssa_name_limit_t snlim_buf; - if (!psnlim) - psnlim = &snlim_buf; - - if (!psnlim->visit_phi (ref)) - return NULL_TREE; - - /* Reflects the range of offsets of all PHI arguments refer to the same - object (i.e., have the same REF). */ - access_ref same_ref; - /* The conservative result of the PHI reflecting the offset and size - of the largest PHI argument, regardless of whether or not they all - refer to the same object. */ - pointer_query empty_qry; - if (!qry) - qry = &empty_qry; - - access_ref phi_ref; - if (pref) - { - phi_ref = *pref; - same_ref = *pref; - } - - /* Set if any argument is a function array (or VLA) parameter not - declared [static]. */ - bool parmarray = false; - /* The size of the smallest object referenced by the PHI arguments. */ - offset_int minsize = 0; - const offset_int maxobjsize = wi::to_offset (max_object_size ()); - /* The offset of the PHI, not reflecting those of its arguments. */ - const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] }; - - const unsigned nargs = gimple_phi_num_args (phi_stmt); - for (unsigned i = 0; i < nargs; ++i) - { - access_ref phi_arg_ref; - tree arg = gimple_phi_arg_def (phi_stmt, i); - if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry) - || phi_arg_ref.sizrng[0] < 0) - /* A PHI with all null pointer arguments. */ - return NULL_TREE; - - /* Add PREF's offset to that of the argument. */ - phi_arg_ref.add_offset (orng[0], orng[1]); - if (TREE_CODE (arg) == SSA_NAME) - qry->put_ref (arg, phi_arg_ref); - - if (all_refs) - all_refs->safe_push (phi_arg_ref); - - const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0 - || phi_arg_ref.sizrng[1] != maxobjsize); - - parmarray |= phi_arg_ref.parmarray; - - const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs); - - if (phi_ref.sizrng[0] < 0) - { - if (!nullp) - same_ref = phi_arg_ref; - phi_ref = phi_arg_ref; - if (arg_known_size) - minsize = phi_arg_ref.sizrng[0]; - continue; - } - - const bool phi_known_size = (phi_ref.sizrng[0] != 0 - || phi_ref.sizrng[1] != maxobjsize); - - if (phi_known_size && phi_arg_ref.sizrng[0] < minsize) - minsize = phi_arg_ref.sizrng[0]; - - /* Disregard null pointers in PHIs with two or more arguments. - TODO: Handle this better! */ - if (nullp) - continue; - - /* Determine the amount of remaining space in the argument. 
*/ - offset_int argrem[2]; - argrem[1] = phi_arg_ref.size_remaining (argrem); - - /* Determine the amount of remaining space computed so far and - if the remaining space in the argument is more use it instead. */ - offset_int phirem[2]; - phirem[1] = phi_ref.size_remaining (phirem); - - if (phi_arg_ref.ref != same_ref.ref) - same_ref.ref = NULL_TREE; - - if (phirem[1] < argrem[1] - || (phirem[1] == argrem[1] - && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1])) - /* Use the argument with the most space remaining as the result, - or the larger one if the space is equal. */ - phi_ref = phi_arg_ref; - - /* Set SAME_REF.OFFRNG to the maximum range of all arguments. */ - if (phi_arg_ref.offrng[0] < same_ref.offrng[0]) - same_ref.offrng[0] = phi_arg_ref.offrng[0]; - if (same_ref.offrng[1] < phi_arg_ref.offrng[1]) - same_ref.offrng[1] = phi_arg_ref.offrng[1]; - } - - if (phi_ref.sizrng[0] < 0) - { - /* Fail if none of the PHI's arguments resulted in updating PHI_REF - (perhaps because they have all been already visited by prior - recursive calls). */ - psnlim->leave_phi (ref); - return NULL_TREE; - } - - if (!same_ref.ref && same_ref.offrng[0] != 0) - /* Clear BASE0 if not all the arguments refer to the same object and - if not all their offsets are zero-based. This allows the final - PHI offset to out of bounds for some arguments but not for others - (or negative even of all the arguments are BASE0), which is overly - permissive. */ - phi_ref.base0 = false; - - if (same_ref.ref) - phi_ref = same_ref; - else - { - /* Replace the lower bound of the largest argument with the size - of the smallest argument, and set PARMARRAY if any argument - was one. */ - phi_ref.sizrng[0] = minsize; - phi_ref.parmarray = parmarray; - } - - /* Avoid changing *THIS. */ - if (pref && pref != this) - *pref = phi_ref; - - psnlim->leave_phi (ref); - - return phi_ref.ref; -} - -/* Return the maximum amount of space remaining and if non-null, set - argument to the minimum. */ - -offset_int -access_ref::size_remaining (offset_int *pmin /* = NULL */) const -{ - offset_int minbuf; - if (!pmin) - pmin = &minbuf; - - /* add_offset() ensures the offset range isn't inverted. */ - gcc_checking_assert (offrng[0] <= offrng[1]); - - if (base0) - { - /* The offset into referenced object is zero-based (i.e., it's - not referenced by a pointer into middle of some unknown object). */ - if (offrng[0] < 0 && offrng[1] < 0) - { - /* If the offset is negative the remaining size is zero. */ - *pmin = 0; - return 0; - } - - if (sizrng[1] <= offrng[0]) - { - /* If the starting offset is greater than or equal to the upper - bound on the size of the object, the space remaining is zero. - As a special case, if it's equal, set *PMIN to -1 to let - the caller know the offset is valid and just past the end. */ - *pmin = sizrng[1] == offrng[0] ? -1 : 0; - return 0; - } - - /* Otherwise return the size minus the lower bound of the offset. */ - offset_int or0 = offrng[0] < 0 ? 0 : offrng[0]; - - *pmin = sizrng[0] - or0; - return sizrng[1] - or0; - } - - /* The offset to the referenced object isn't zero-based (i.e., it may - refer to a byte other than the first. The size of such an object - is constrained only by the size of the address space (the result - of max_object_size()). */ - if (sizrng[1] <= offrng[0]) - { - *pmin = 0; - return 0; - } - - offset_int or0 = offrng[0] < 0 ? 0 : offrng[0]; - - *pmin = sizrng[0] - or0; - return sizrng[1] - or0; -} - -/* Add the range [MIN, MAX] to the offset range. 
For known objects (with - zero-based offsets) at least one of whose offset's bounds is in range, - constrain the other (or both) to the bounds of the object (i.e., zero - and the upper bound of its size). This improves the quality of - diagnostics. */ - -void access_ref::add_offset (const offset_int &min, const offset_int &max) -{ - if (min <= max) - { - /* To add an ordinary range just add it to the bounds. */ - offrng[0] += min; - offrng[1] += max; - } - else if (!base0) - { - /* To add an inverted range to an offset to an unknown object - expand it to the maximum. */ - add_max_offset (); - return; - } - else - { - /* To add an inverted range to an offset to an known object set - the upper bound to the maximum representable offset value - (which may be greater than MAX_OBJECT_SIZE). - The lower bound is either the sum of the current offset and - MIN when abs(MAX) is greater than the former, or zero otherwise. - Zero because then then inverted range includes the negative of - the lower bound. */ - offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); - offrng[1] = maxoff; - - if (max >= 0) - { - offrng[0] = 0; - return; - } - - offset_int absmax = wi::abs (max); - if (offrng[0] < absmax) - { - offrng[0] += min; - /* Cap the lower bound at the upper (set to MAXOFF above) - to avoid inadvertently recreating an inverted range. */ - if (offrng[1] < offrng[0]) - offrng[0] = offrng[1]; - } - else - offrng[0] = 0; - } - - if (!base0) - return; - - /* When referencing a known object check to see if the offset computed - so far is in bounds... */ - offset_int remrng[2]; - remrng[1] = size_remaining (remrng); - if (remrng[1] > 0 || remrng[0] < 0) - { - /* ...if so, constrain it so that neither bound exceeds the size of - the object. Out of bounds offsets are left unchanged, and, for - better or worse, become in bounds later. They should be detected - and diagnosed at the point they first become invalid by - -Warray-bounds. */ - if (offrng[0] < 0) - offrng[0] = 0; - if (offrng[1] > sizrng[1]) - offrng[1] = sizrng[1]; - } -} - -/* Set a bit for the PHI in VISITED and return true if it wasn't - already set. */ - -bool -ssa_name_limit_t::visit_phi (tree ssa_name) -{ - if (!visited) - visited = BITMAP_ALLOC (NULL); - - /* Return false if SSA_NAME has already been visited. */ - return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name)); -} - -/* Clear a bit for the PHI in VISITED. */ - -void -ssa_name_limit_t::leave_phi (tree ssa_name) -{ - /* Return false if SSA_NAME has already been visited. */ - bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name)); -} - -/* Return false if the SSA_NAME chain length counter has reached - the limit, otherwise increment the counter and return true. */ - -bool -ssa_name_limit_t::next () -{ - /* Return a negative value to let caller avoid recursing beyond - the specified limit. */ - if (ssa_def_max == 0) - return false; - - --ssa_def_max; - return true; -} - -/* If the SSA_NAME has already been "seen" return a positive value. - Otherwise add it to VISITED. If the SSA_NAME limit has been - reached, return a negative value. Otherwise return zero. */ - -int -ssa_name_limit_t::next_phi (tree ssa_name) -{ - { - gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name); - /* Return a positive value if the PHI has already been visited. */ - if (gimple_code (def_stmt) == GIMPLE_PHI - && !visit_phi (ssa_name)) - return 1; - } - - /* Return a negative value to let caller avoid recursing beyond - the specified limit. 
*/ - if (ssa_def_max == 0) - return -1; - - --ssa_def_max; - - return 0; -} - -ssa_name_limit_t::~ssa_name_limit_t () -{ - if (visited) - BITMAP_FREE (visited); -} - -/* Default ctor. Initialize object with pointers to the range_query - and cache_type instances to use or null. */ - -pointer_query::pointer_query (range_query *qry /* = NULL */, - cache_type *cache /* = NULL */) -: rvals (qry), var_cache (cache), hits (), misses (), - failures (), depth (), max_depth () -{ - /* No op. */ -} - -/* Return a pointer to the cached access_ref instance for the SSA_NAME - PTR if it's there or null otherwise. */ - -const access_ref * -pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const -{ - if (!var_cache) - { - ++misses; - return NULL; - } - - unsigned version = SSA_NAME_VERSION (ptr); - unsigned idx = version << 1 | (ostype & 1); - if (var_cache->indices.length () <= idx) - { - ++misses; - return NULL; - } - - unsigned cache_idx = var_cache->indices[idx]; - if (var_cache->access_refs.length () <= cache_idx) - { - ++misses; - return NULL; - } - - access_ref &cache_ref = var_cache->access_refs[cache_idx]; - if (cache_ref.ref) - { - ++hits; - return &cache_ref; - } - - ++misses; - return NULL; -} - -/* Retrieve the access_ref instance for a variable from the cache if it's - there or compute it and insert it into the cache if it's nonnonull. */ - -bool -pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */) -{ - const unsigned version - = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0; - - if (var_cache && version) - { - unsigned idx = version << 1 | (ostype & 1); - if (idx < var_cache->indices.length ()) - { - unsigned cache_idx = var_cache->indices[idx] - 1; - if (cache_idx < var_cache->access_refs.length () - && var_cache->access_refs[cache_idx].ref) - { - ++hits; - *pref = var_cache->access_refs[cache_idx]; - return true; - } - } - - ++misses; - } - - if (!compute_objsize (ptr, ostype, pref, this)) - { - ++failures; - return false; - } - - return true; -} - -/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's - nonnull. */ - -void -pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */) -{ - /* Only add populated/valid entries. */ - if (!var_cache || !ref.ref || ref.sizrng[0] < 0) - return; - - /* Add REF to the two-level cache. */ - unsigned version = SSA_NAME_VERSION (ptr); - unsigned idx = version << 1 | (ostype & 1); - - /* Grow INDICES if necessary. An index is valid if it's nonzero. - Its value minus one is the index into ACCESS_REFS. Not all - entries are valid. */ - if (var_cache->indices.length () <= idx) - var_cache->indices.safe_grow_cleared (idx + 1); - - if (!var_cache->indices[idx]) - var_cache->indices[idx] = var_cache->access_refs.length () + 1; - - /* Grow ACCESS_REF cache if necessary. An entry is valid if its - REF member is nonnull. All entries except for the last two - are valid. Once nonnull, the REF value must stay unchanged. */ - unsigned cache_idx = var_cache->indices[idx]; - if (var_cache->access_refs.length () <= cache_idx) - var_cache->access_refs.safe_grow_cleared (cache_idx + 1); - - access_ref cache_ref = var_cache->access_refs[cache_idx - 1]; - if (cache_ref.ref) - { - gcc_checking_assert (cache_ref.ref == ref.ref); - return; - } - - cache_ref = ref; -} - -/* Flush the cache if it's nonnull. 
*/ - -void -pointer_query::flush_cache () -{ - if (!var_cache) - return; - var_cache->indices.release (); - var_cache->access_refs.release (); -} - /* Return true if NAME starts with __builtin_ or __sync_. */ static bool is_builtin_name (const char *name) { - if (strncmp (name, "__builtin_", 10) == 0) - return true; - if (strncmp (name, "__sync_", 7) == 0) - return true; - if (strncmp (name, "__atomic_", 9) == 0) - return true; - return false; + return (startswith (name, "__builtin_") + || startswith (name, "__sync_") + || startswith (name, "__atomic_")); } /* Return true if NODE should be considered for inline expansion regardless @@ -930,6 +383,10 @@ bool get_object_alignment_1 (tree exp, unsigned int *alignp, unsigned HOST_WIDE_INT *bitposp) { + /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal + with it. */ + if (TREE_CODE (exp) == WITH_SIZE_EXPR) + exp = TREE_OPERAND (exp, 0); return get_object_alignment_2 (exp, alignp, bitposp, false); } @@ -1083,215 +540,6 @@ string_length (const void *ptr, unsigned eltsize, unsigned maxelts) return n; } -/* For a call EXPR at LOC to a function FNAME that expects a string - in the argument ARG, issue a diagnostic due to it being a called - with an argument that is a character array with no terminating - NUL. SIZE is the EXACT size of the array, and BNDRNG the number - of characters in which the NUL is expected. Either EXPR or FNAME - may be null but noth both. SIZE may be null when BNDRNG is null. */ - -void -warn_string_no_nul (location_t loc, tree expr, const char *fname, - tree arg, tree decl, tree size /* = NULL_TREE */, - bool exact /* = false */, - const wide_int bndrng[2] /* = NULL */) -{ - if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg)) - return; - - loc = expansion_point_location_if_in_system_header (loc); - bool warned; - - /* Format the bound range as a string to keep the nuber of messages - from exploding. */ - char bndstr[80]; - *bndstr = 0; - if (bndrng) - { - if (bndrng[0] == bndrng[1]) - sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ()); - else - sprintf (bndstr, "[%llu, %llu]", - (unsigned long long) bndrng[0].to_uhwi (), - (unsigned long long) bndrng[1].to_uhwi ()); - } - - const tree maxobjsize = max_object_size (); - const wide_int maxsiz = wi::to_wide (maxobjsize); - if (expr) - { - tree func = get_callee_fndecl (expr); - if (bndrng) - { - if (wi::ltu_p (maxsiz, bndrng[0])) - warned = warning_at (loc, OPT_Wstringop_overread, - "%K%qD specified bound %s exceeds " - "maximum object size %E", - expr, func, bndstr, maxobjsize); - else - { - bool maybe = wi::to_wide (size) == bndrng[0]; - warned = warning_at (loc, OPT_Wstringop_overread, - exact - ? G_("%K%qD specified bound %s exceeds " - "the size %E of unterminated array") - : (maybe - ? G_("%K%qD specified bound %s may " - "exceed the size of at most %E " - "of unterminated array") - : G_("%K%qD specified bound %s exceeds " - "the size of at most %E " - "of unterminated array")), - expr, func, bndstr, size); - } - } - else - warned = warning_at (loc, OPT_Wstringop_overread, - "%K%qD argument missing terminating nul", - expr, func); - } - else - { - if (bndrng) - { - if (wi::ltu_p (maxsiz, bndrng[0])) - warned = warning_at (loc, OPT_Wstringop_overread, - "%qs specified bound %s exceeds " - "maximum object size %E", - fname, bndstr, maxobjsize); - else - { - bool maybe = wi::to_wide (size) == bndrng[0]; - warned = warning_at (loc, OPT_Wstringop_overread, - exact - ? 
G_("%qs specified bound %s exceeds " - "the size %E of unterminated array") - : (maybe - ? G_("%qs specified bound %s may " - "exceed the size of at most %E " - "of unterminated array") - : G_("%qs specified bound %s exceeds " - "the size of at most %E " - "of unterminated array")), - fname, bndstr, size); - } - } - else - warned = warning_at (loc, OPT_Wstringop_overread, - "%qs argument missing terminating nul", - fname); - } - - if (warned) - { - inform (DECL_SOURCE_LOCATION (decl), - "referenced argument declared here"); - TREE_NO_WARNING (arg) = 1; - if (expr) - TREE_NO_WARNING (expr) = 1; - } -} - -/* For a call EXPR (which may be null) that expects a string argument - SRC as an argument, returns false if SRC is a character array with - no terminating NUL. When nonnull, BOUND is the number of characters - in which to expect the terminating NUL. RDONLY is true for read-only - accesses such as strcmp, false for read-write such as strcpy. When - EXPR is also issues a warning. */ - -bool -check_nul_terminated_array (tree expr, tree src, - tree bound /* = NULL_TREE */) -{ - /* The constant size of the array SRC points to. The actual size - may be less of EXACT is true, but not more. */ - tree size; - /* True if SRC involves a non-constant offset into the array. */ - bool exact; - /* The unterminated constant array SRC points to. */ - tree nonstr = unterminated_array (src, &size, &exact); - if (!nonstr) - return true; - - /* NONSTR refers to the non-nul terminated constant array and SIZE - is the constant size of the array in bytes. EXACT is true when - SIZE is exact. */ - - wide_int bndrng[2]; - if (bound) - { - if (TREE_CODE (bound) == INTEGER_CST) - bndrng[0] = bndrng[1] = wi::to_wide (bound); - else - { - value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1); - if (rng != VR_RANGE) - return true; - } - - if (exact) - { - if (wi::leu_p (bndrng[0], wi::to_wide (size))) - return true; - } - else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED)) - return true; - } - - if (expr) - warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr, - size, exact, bound ? bndrng : NULL); - - return false; -} - -/* If EXP refers to an unterminated constant character array return - the declaration of the object of which the array is a member or - element and if SIZE is not null, set *SIZE to the size of - the unterminated array and set *EXACT if the size is exact or - clear it otherwise. Otherwise return null. */ - -tree -unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */) -{ - /* C_STRLEN will return NULL and set DECL in the info - structure if EXP references a unterminated array. */ - c_strlen_data lendata = { }; - tree len = c_strlen (exp, 1, &lendata); - if (len == NULL_TREE && lendata.minlen && lendata.decl) - { - if (size) - { - len = lendata.minlen; - if (lendata.off) - { - /* Constant offsets are already accounted for in LENDATA.MINLEN, - but not in a SSA_NAME + CST expression. */ - if (TREE_CODE (lendata.off) == INTEGER_CST) - *exact = true; - else if (TREE_CODE (lendata.off) == PLUS_EXPR - && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST) - { - /* Subtract the offset from the size of the array. 
*/ - *exact = false; - tree temp = TREE_OPERAND (lendata.off, 1); - temp = fold_convert (ssizetype, temp); - len = fold_build2 (MINUS_EXPR, ssizetype, len, temp); - } - else - *exact = false; - } - else - *exact = true; - - *size = len; - } - return lendata.decl; - } - - return NULL_TREE; -} - /* Compute the length of a null-terminated character string or wide character string handling character sizes of 1, 2, and 4 bytes. TREE_STRING_LENGTH is not the right way because it evaluates to @@ -1445,14 +693,14 @@ c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize) { /* Suppress multiple warnings for propagated constant strings. */ if (only_value != 2 - && !TREE_NO_WARNING (arg) + && !warning_suppressed_p (arg, OPT_Warray_bounds) && warning_at (loc, OPT_Warray_bounds, "offset %qwi outside bounds of constant string", eltoff)) { if (decl) inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl); - TREE_NO_WARNING (arg) = 1; + suppress_warning (arg, OPT_Warray_bounds); } return NULL_TREE; } @@ -2490,8 +1738,12 @@ expand_builtin_apply (rtx function, rtx arguments, rtx argsize) if (targetm.have_untyped_call ()) { rtx mem = gen_rtx_MEM (FUNCTION_MODE, function); - emit_call_insn (targetm.gen_untyped_call (mem, result, - result_vector (1, result))); + rtx_insn *seq = targetm.gen_untyped_call (mem, result, + result_vector (1, result)); + for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn)) + if (CALL_P (insn)) + add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX); + emit_insn (seq); } else if (targetm.have_call_value ()) { @@ -3698,8 +2950,6 @@ expand_builtin_strlen (tree exp, rtx target, return NULL_RTX; tree src = CALL_EXPR_ARG (exp, 0); - if (!check_read_access (exp, src)) - return NULL_RTX; /* If the length can be computed at compile-time, return it. */ if (tree len = c_strlen (src, 0)) @@ -3803,8 +3053,6 @@ expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode) if (!bound) return NULL_RTX; - check_read_access (exp, src, bound); - location_t loc = UNKNOWN_LOCATION; if (EXPR_HAS_LOCATION (exp)) loc = EXPR_LOCATION (exp); @@ -3829,9 +3077,12 @@ expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode) return NULL_RTX; wide_int min, max; - enum value_range_kind rng = get_range_info (bound, &min, &max); - if (rng != VR_RANGE) + value_range r; + get_global_range_query ()->range_of_expr (r, bound); + if (r.kind () != VR_RANGE) return NULL_RTX; + min = r.lower_bound (); + max = r.upper_bound (); if (!len || TREE_CODE (len) != INTEGER_CST) { @@ -3856,14 +3107,17 @@ expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode) a target constant. */ static rtx -builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset, - scalar_int_mode mode) +builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset, + fixed_size_mode mode) { /* The REPresentation pointed to by DATA need not be a nul-terminated string but the caller guarantees it's large enough for MODE. */ const char *rep = (const char *) data; - return c_readstr (rep + offset, mode, /*nul_terminated=*/false); + /* The by-pieces infrastructure does not try to pick a vector mode + for memcpy expansion. */ + return c_readstr (rep + offset, as_a <scalar_int_mode> (mode), + /*nul_terminated=*/false); } /* LEN specify length of the block of memcpy/memset operation. 
@@ -3899,7 +3153,16 @@ determine_block_size (tree len, rtx len_rtx, *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx)); if (TREE_CODE (len) == SSA_NAME) - range_type = get_range_info (len, &min, &max); + { + value_range r; + get_global_range_query ()->range_of_expr (r, len); + range_type = r.kind (); + if (range_type != VR_UNDEFINED) + { + min = wi::to_wide (r.min ()); + max = wi::to_wide (r.max ()); + } + } if (range_type == VR_RANGE) { if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ()) @@ -3927,1964 +3190,6 @@ determine_block_size (tree len, rtx len_rtx, GET_MODE_MASK (GET_MODE (len_rtx))); } -/* Issue a warning OPT for a bounded call EXP with a bound in RANGE - accessing an object with SIZE. */ - -static bool -maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func, - tree bndrng[2], tree size, const access_data *pad = NULL) -{ - if (!bndrng[0] || TREE_NO_WARNING (exp)) - return false; - - tree maxobjsize = max_object_size (); - - bool warned = false; - - if (opt == OPT_Wstringop_overread) - { - bool maybe = pad && pad->src.phi (); - - if (tree_int_cst_lt (maxobjsize, bndrng[0])) - { - if (bndrng[0] == bndrng[1]) - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD specified bound %E may " - "exceed maximum object size %E") - : G_("%K%qD specified bound %E " - "exceeds maximum object size %E")), - exp, func, bndrng[0], maxobjsize) - : warning_at (loc, opt, - (maybe - ? G_("%Kspecified bound %E may " - "exceed maximum object size %E") - : G_("%Kspecified bound %E " - "exceeds maximum object size %E")), - exp, bndrng[0], maxobjsize)); - else - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD specified bound [%E, %E] may " - "exceed maximum object size %E") - : G_("%K%qD specified bound [%E, %E] " - "exceeds maximum object size %E")), - exp, func, - bndrng[0], bndrng[1], maxobjsize) - : warning_at (loc, opt, - (maybe - ? G_("%Kspecified bound [%E, %E] may " - "exceed maximum object size %E") - : G_("%Kspecified bound [%E, %E] " - "exceeds maximum object size %E")), - exp, bndrng[0], bndrng[1], maxobjsize)); - } - else if (!size || tree_int_cst_le (bndrng[0], size)) - return false; - else if (tree_int_cst_equal (bndrng[0], bndrng[1])) - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD specified bound %E may exceed " - "source size %E") - : G_("%K%qD specified bound %E exceeds " - "source size %E")), - exp, func, bndrng[0], size) - : warning_at (loc, opt, - (maybe - ? G_("%Kspecified bound %E may exceed " - "source size %E") - : G_("%Kspecified bound %E exceeds " - "source size %E")), - exp, bndrng[0], size)); - else - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD specified bound [%E, %E] may " - "exceed source size %E") - : G_("%K%qD specified bound [%E, %E] exceeds " - "source size %E")), - exp, func, bndrng[0], bndrng[1], size) - : warning_at (loc, opt, - (maybe - ? 
G_("%Kspecified bound [%E, %E] may exceed " - "source size %E") - : G_("%Kspecified bound [%E, %E] exceeds " - "source size %E")), - exp, bndrng[0], bndrng[1], size)); - if (warned) - { - if (pad && pad->src.ref) - { - if (DECL_P (pad->src.ref)) - inform (DECL_SOURCE_LOCATION (pad->src.ref), - "source object declared here"); - else if (EXPR_HAS_LOCATION (pad->src.ref)) - inform (EXPR_LOCATION (pad->src.ref), - "source object allocated here"); - } - TREE_NO_WARNING (exp) = true; - } - - return warned; - } - - bool maybe = pad && pad->dst.phi (); - if (tree_int_cst_lt (maxobjsize, bndrng[0])) - { - if (bndrng[0] == bndrng[1]) - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD specified size %E may " - "exceed maximum object size %E") - : G_("%K%qD specified size %E " - "exceeds maximum object size %E")), - exp, func, bndrng[0], maxobjsize) - : warning_at (loc, opt, - (maybe - ? G_("%Kspecified size %E may exceed " - "maximum object size %E") - : G_("%Kspecified size %E exceeds " - "maximum object size %E")), - exp, bndrng[0], maxobjsize)); - else - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD specified size between %E and %E " - "may exceed maximum object size %E") - : G_("%K%qD specified size between %E and %E " - "exceeds maximum object size %E")), - exp, func, - bndrng[0], bndrng[1], maxobjsize) - : warning_at (loc, opt, - (maybe - ? G_("%Kspecified size between %E and %E " - "may exceed maximum object size %E") - : G_("%Kspecified size between %E and %E " - "exceeds maximum object size %E")), - exp, bndrng[0], bndrng[1], maxobjsize)); - } - else if (!size || tree_int_cst_le (bndrng[0], size)) - return false; - else if (tree_int_cst_equal (bndrng[0], bndrng[1])) - warned = (func - ? warning_at (loc, OPT_Wstringop_overflow_, - (maybe - ? G_("%K%qD specified bound %E may exceed " - "destination size %E") - : G_("%K%qD specified bound %E exceeds " - "destination size %E")), - exp, func, bndrng[0], size) - : warning_at (loc, OPT_Wstringop_overflow_, - (maybe - ? G_("%Kspecified bound %E may exceed " - "destination size %E") - : G_("%Kspecified bound %E exceeds " - "destination size %E")), - exp, bndrng[0], size)); - else - warned = (func - ? warning_at (loc, OPT_Wstringop_overflow_, - (maybe - ? G_("%K%qD specified bound [%E, %E] may exceed " - "destination size %E") - : G_("%K%qD specified bound [%E, %E] exceeds " - "destination size %E")), - exp, func, bndrng[0], bndrng[1], size) - : warning_at (loc, OPT_Wstringop_overflow_, - (maybe - ? G_("%Kspecified bound [%E, %E] exceeds " - "destination size %E") - : G_("%Kspecified bound [%E, %E] exceeds " - "destination size %E")), - exp, bndrng[0], bndrng[1], size)); - - if (warned) - { - if (pad && pad->dst.ref) - { - if (DECL_P (pad->dst.ref)) - inform (DECL_SOURCE_LOCATION (pad->dst.ref), - "destination object declared here"); - else if (EXPR_HAS_LOCATION (pad->dst.ref)) - inform (EXPR_LOCATION (pad->dst.ref), - "destination object allocated here"); - } - TREE_NO_WARNING (exp) = true; - } - - return warned; -} - -/* For an expression EXP issue an access warning controlled by option OPT - with access to a region SIZE bytes in size in the RANGE of sizes. - WRITE is true for a write access, READ for a read access, neither for - call that may or may not perform an access but for which the range - is expected to valid. - Returns true when a warning has been issued. 
*/ - -static bool -warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2], - tree size, bool write, bool read, bool maybe) -{ - bool warned = false; - - if (write && read) - { - if (tree_int_cst_equal (range[0], range[1])) - warned = (func - ? warning_n (loc, opt, tree_to_uhwi (range[0]), - (maybe - ? G_("%K%qD may access %E byte in a region " - "of size %E") - : G_("%K%qD accessing %E byte in a region " - "of size %E")), - (maybe - ? G_ ("%K%qD may access %E bytes in a region " - "of size %E") - : G_ ("%K%qD accessing %E bytes in a region " - "of size %E")), - exp, func, range[0], size) - : warning_n (loc, opt, tree_to_uhwi (range[0]), - (maybe - ? G_("%Kmay access %E byte in a region " - "of size %E") - : G_("%Kaccessing %E byte in a region " - "of size %E")), - (maybe - ? G_("%Kmay access %E bytes in a region " - "of size %E") - : G_("%Kaccessing %E bytes in a region " - "of size %E")), - exp, range[0], size)); - else if (tree_int_cst_sign_bit (range[1])) - { - /* Avoid printing the upper bound if it's invalid. */ - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD may access %E or more bytes " - "in a region of size %E") - : G_("%K%qD accessing %E or more bytes " - "in a region of size %E")), - exp, func, range[0], size) - : warning_at (loc, opt, - (maybe - ? G_("%Kmay access %E or more bytes " - "in a region of size %E") - : G_("%Kaccessing %E or more bytes " - "in a region of size %E")), - exp, range[0], size)); - } - else - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD may access between %E and %E " - "bytes in a region of size %E") - : G_("%K%qD accessing between %E and %E " - "bytes in a region of size %E")), - exp, func, range[0], range[1], - size) - : warning_at (loc, opt, - (maybe - ? G_("%Kmay access between %E and %E bytes " - "in a region of size %E") - : G_("%Kaccessing between %E and %E bytes " - "in a region of size %E")), - exp, range[0], range[1], - size)); - return warned; - } - - if (write) - { - if (tree_int_cst_equal (range[0], range[1])) - warned = (func - ? warning_n (loc, opt, tree_to_uhwi (range[0]), - (maybe - ? G_("%K%qD may write %E byte into a region " - "of size %E") - : G_("%K%qD writing %E byte into a region " - "of size %E overflows the destination")), - (maybe - ? G_("%K%qD may write %E bytes into a region " - "of size %E") - : G_("%K%qD writing %E bytes into a region " - "of size %E overflows the destination")), - exp, func, range[0], size) - : warning_n (loc, opt, tree_to_uhwi (range[0]), - (maybe - ? G_("%Kmay write %E byte into a region " - "of size %E") - : G_("%Kwriting %E byte into a region " - "of size %E overflows the destination")), - (maybe - ? G_("%Kmay write %E bytes into a region " - "of size %E") - : G_("%Kwriting %E bytes into a region " - "of size %E overflows the destination")), - exp, range[0], size)); - else if (tree_int_cst_sign_bit (range[1])) - { - /* Avoid printing the upper bound if it's invalid. */ - warned = (func - ? warning_at (loc, opt, - (maybe - ? G_("%K%qD may write %E or more bytes " - "into a region of size %E") - : G_("%K%qD writing %E or more bytes " - "into a region of size %E overflows " - "the destination")), - exp, func, range[0], size) - : warning_at (loc, opt, - (maybe - ? G_("%Kmay write %E or more bytes into " - "a region of size %E") - : G_("%Kwriting %E or more bytes into " - "a region of size %E overflows " - "the destination")), - exp, range[0], size)); - } - else - warned = (func - ? warning_at (loc, opt, - (maybe - ? 
G_("%K%qD may write between %E and %E bytes " - "into a region of size %E") - : G_("%K%qD writing between %E and %E bytes " - "into a region of size %E overflows " - "the destination")), - exp, func, range[0], range[1], - size) - : warning_at (loc, opt, - (maybe - ? G_("%Kmay write between %E and %E bytes " - "into a region of size %E") - : G_("%Kwriting between %E and %E bytes " - "into a region of size %E overflows " - "the destination")), - exp, range[0], range[1], - size)); - return warned; - } - - if (read) - { - if (tree_int_cst_equal (range[0], range[1])) - warned = (func - ? warning_n (loc, OPT_Wstringop_overread, - tree_to_uhwi (range[0]), - (maybe - ? G_("%K%qD may read %E byte from a region " - "of size %E") - : G_("%K%qD reading %E byte from a region " - "of size %E")), - (maybe - ? G_("%K%qD may read %E bytes from a region " - "of size %E") - : G_("%K%qD reading %E bytes from a region " - "of size %E")), - exp, func, range[0], size) - : warning_n (loc, OPT_Wstringop_overread, - tree_to_uhwi (range[0]), - (maybe - ? G_("%Kmay read %E byte from a region " - "of size %E") - : G_("%Kreading %E byte from a region " - "of size %E")), - (maybe - ? G_("%Kmay read %E bytes from a region " - "of size %E") - : G_("%Kreading %E bytes from a region " - "of size %E")), - exp, range[0], size)); - else if (tree_int_cst_sign_bit (range[1])) - { - /* Avoid printing the upper bound if it's invalid. */ - warned = (func - ? warning_at (loc, OPT_Wstringop_overread, - (maybe - ? G_("%K%qD may read %E or more bytes " - "from a region of size %E") - : G_("%K%qD reading %E or more bytes " - "from a region of size %E")), - exp, func, range[0], size) - : warning_at (loc, OPT_Wstringop_overread, - (maybe - ? G_("%Kmay read %E or more bytes " - "from a region of size %E") - : G_("%Kreading %E or more bytes " - "from a region of size %E")), - exp, range[0], size)); - } - else - warned = (func - ? warning_at (loc, OPT_Wstringop_overread, - (maybe - ? G_("%K%qD may read between %E and %E bytes " - "from a region of size %E") - : G_("%K%qD reading between %E and %E bytes " - "from a region of size %E")), - exp, func, range[0], range[1], size) - : warning_at (loc, opt, - (maybe - ? G_("%Kmay read between %E and %E bytes " - "from a region of size %E") - : G_("%Kreading between %E and %E bytes " - "from a region of size %E")), - exp, range[0], range[1], size)); - - if (warned) - TREE_NO_WARNING (exp) = true; - - return warned; - } - - if (tree_int_cst_equal (range[0], range[1]) - || tree_int_cst_sign_bit (range[1])) - warned = (func - ? warning_n (loc, OPT_Wstringop_overread, - tree_to_uhwi (range[0]), - "%K%qD expecting %E byte in a region of size %E", - "%K%qD expecting %E bytes in a region of size %E", - exp, func, range[0], size) - : warning_n (loc, OPT_Wstringop_overread, - tree_to_uhwi (range[0]), - "%Kexpecting %E byte in a region of size %E", - "%Kexpecting %E bytes in a region of size %E", - exp, range[0], size)); - else if (tree_int_cst_sign_bit (range[1])) - { - /* Avoid printing the upper bound if it's invalid. */ - warned = (func - ? warning_at (loc, OPT_Wstringop_overread, - "%K%qD expecting %E or more bytes in a region " - "of size %E", - exp, func, range[0], size) - : warning_at (loc, OPT_Wstringop_overread, - "%Kexpecting %E or more bytes in a region " - "of size %E", - exp, range[0], size)); - } - else - warned = (func - ? 
warning_at (loc, OPT_Wstringop_overread, - "%K%qD expecting between %E and %E bytes in " - "a region of size %E", - exp, func, range[0], range[1], size) - : warning_at (loc, OPT_Wstringop_overread, - "%Kexpecting between %E and %E bytes in " - "a region of size %E", - exp, range[0], range[1], size)); - - if (warned) - TREE_NO_WARNING (exp) = true; - - return warned; -} - -/* Issue one inform message describing each target of an access REF. - WRITE is set for a write access and clear for a read access. */ - -void -access_ref::inform_access (access_mode mode) const -{ - const access_ref &aref = *this; - if (!aref.ref) - return; - - if (aref.phi ()) - { - /* Set MAXREF to refer to the largest object and fill ALL_REFS - with data for all objects referenced by the PHI arguments. */ - access_ref maxref; - auto_vec<access_ref> all_refs; - if (!get_ref (&all_refs, &maxref)) - return; - - /* Except for MAXREF, the rest of the arguments' offsets need not - reflect one added to the PHI itself. Determine the latter from - MAXREF on which the result is based. */ - const offset_int orng[] = - { - offrng[0] - maxref.offrng[0], - wi::smax (offrng[1] - maxref.offrng[1], offrng[0]), - }; - - /* Add the final PHI's offset to that of each of the arguments - and recurse to issue an inform message for it. */ - for (unsigned i = 0; i != all_refs.length (); ++i) - { - /* Skip any PHIs; those could lead to infinite recursion. */ - if (all_refs[i].phi ()) - continue; - - all_refs[i].add_offset (orng[0], orng[1]); - all_refs[i].inform_access (mode); - } - return; - } - - /* Convert offset range and avoid including a zero range since it - isn't necessarily meaningful. */ - HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node)); - HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node)); - HOST_WIDE_INT minoff; - HOST_WIDE_INT maxoff = diff_max; - if (wi::fits_shwi_p (aref.offrng[0])) - minoff = aref.offrng[0].to_shwi (); - else - minoff = aref.offrng[0] < 0 ? diff_min : diff_max; - - if (wi::fits_shwi_p (aref.offrng[1])) - maxoff = aref.offrng[1].to_shwi (); - - if (maxoff <= diff_min || maxoff >= diff_max) - /* Avoid mentioning an upper bound that's equal to or in excess - of the maximum of ptrdiff_t. */ - maxoff = minoff; - - /* Convert size range and always include it since all sizes are - meaningful. */ - unsigned long long minsize = 0, maxsize = 0; - if (wi::fits_shwi_p (aref.sizrng[0]) - && wi::fits_shwi_p (aref.sizrng[1])) - { - minsize = aref.sizrng[0].to_shwi (); - maxsize = aref.sizrng[1].to_shwi (); - } - - /* SIZRNG doesn't necessarily have the same range as the allocation - size determined by gimple_call_alloc_size (). */ - char sizestr[80]; - if (minsize == maxsize) - sprintf (sizestr, "%llu", minsize); - else - sprintf (sizestr, "[%llu, %llu]", minsize, maxsize); - - char offstr[80]; - if (minoff == 0 - && (maxoff == 0 || aref.sizrng[1] <= maxoff)) - offstr[0] = '\0'; - else if (minoff == maxoff) - sprintf (offstr, "%lli", (long long) minoff); - else - sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff); - - location_t loc = UNKNOWN_LOCATION; - - tree ref = this->ref; - tree allocfn = NULL_TREE; - if (TREE_CODE (ref) == SSA_NAME) - { - gimple *stmt = SSA_NAME_DEF_STMT (ref); - if (is_gimple_call (stmt)) - { - loc = gimple_location (stmt); - if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)) - { - /* Strip the SSA_NAME suffix from the variable name and - recreate an identifier with the VLA's original name. 
*/ - ref = gimple_call_lhs (stmt); - if (SSA_NAME_IDENTIFIER (ref)) - { - ref = SSA_NAME_IDENTIFIER (ref); - const char *id = IDENTIFIER_POINTER (ref); - size_t len = strcspn (id, ".$"); - if (!len) - len = strlen (id); - ref = get_identifier_with_length (id, len); - } - } - else - { - /* Except for VLAs, retrieve the allocation function. */ - allocfn = gimple_call_fndecl (stmt); - if (!allocfn) - allocfn = gimple_call_fn (stmt); - if (TREE_CODE (allocfn) == SSA_NAME) - { - /* For an ALLOC_CALL via a function pointer make a small - effort to determine the destination of the pointer. */ - gimple *def = SSA_NAME_DEF_STMT (allocfn); - if (gimple_assign_single_p (def)) - { - tree rhs = gimple_assign_rhs1 (def); - if (DECL_P (rhs)) - allocfn = rhs; - else if (TREE_CODE (rhs) == COMPONENT_REF) - allocfn = TREE_OPERAND (rhs, 1); - } - } - } - } - else if (gimple_nop_p (stmt)) - /* Handle DECL_PARM below. */ - ref = SSA_NAME_VAR (ref); - } - - if (DECL_P (ref)) - loc = DECL_SOURCE_LOCATION (ref); - else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref)) - loc = EXPR_LOCATION (ref); - else if (TREE_CODE (ref) != IDENTIFIER_NODE - && TREE_CODE (ref) != SSA_NAME) - return; - - if (mode == access_read_write || mode == access_write_only) - { - if (allocfn == NULL_TREE) - { - if (*offstr) - inform (loc, "at offset %s into destination object %qE of size %s", - offstr, ref, sizestr); - else - inform (loc, "destination object %qE of size %s", ref, sizestr); - return; - } - - if (*offstr) - inform (loc, - "at offset %s into destination object of size %s " - "allocated by %qE", offstr, sizestr, allocfn); - else - inform (loc, "destination object of size %s allocated by %qE", - sizestr, allocfn); - return; - } - - if (allocfn == NULL_TREE) - { - if (*offstr) - inform (loc, "at offset %s into source object %qE of size %s", - offstr, ref, sizestr); - else - inform (loc, "source object %qE of size %s", ref, sizestr); - - return; - } - - if (*offstr) - inform (loc, - "at offset %s into source object of size %s allocated by %qE", - offstr, sizestr, allocfn); - else - inform (loc, "source object of size %s allocated by %qE", - sizestr, allocfn); -} - -/* Helper to set RANGE to the range of BOUND if it's nonnull, bounded - by BNDRNG if nonnull and valid. */ - -static void -get_size_range (tree bound, tree range[2], const offset_int bndrng[2]) -{ - if (bound) - get_size_range (bound, range); - - if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U)) - return; - - if (range[0] && TREE_CODE (range[0]) == INTEGER_CST) - { - offset_int r[] = - { wi::to_offset (range[0]), wi::to_offset (range[1]) }; - if (r[0] < bndrng[0]) - range[0] = wide_int_to_tree (sizetype, bndrng[0]); - if (bndrng[1] < r[1]) - range[1] = wide_int_to_tree (sizetype, bndrng[1]); - } - else - { - range[0] = wide_int_to_tree (sizetype, bndrng[0]); - range[1] = wide_int_to_tree (sizetype, bndrng[1]); - } -} - -/* Try to verify that the sizes and lengths of the arguments to a string - manipulation function given by EXP are within valid bounds and that - the operation does not lead to buffer overflow or read past the end. - Arguments other than EXP may be null. When non-null, the arguments - have the following meaning: - DST is the destination of a copy call or NULL otherwise. - SRC is the source of a copy call or NULL otherwise. - DSTWRITE is the number of bytes written into the destination obtained - from the user-supplied size argument to the function (such as in - memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE). 
- MAXREAD is the user-supplied bound on the length of the source sequence - (such as in strncat(d, s, N). It specifies the upper limit on the number - of bytes to write. If NULL, it's taken to be the same as DSTWRITE. - SRCSTR is the source string (such as in strcpy(DST, SRC)) when the - expression EXP is a string function call (as opposed to a memory call - like memcpy). As an exception, SRCSTR can also be an integer denoting - the precomputed size of the source string or object (for functions like - memcpy). - DSTSIZE is the size of the destination object. - - When DSTWRITE is null LEN is checked to verify that it doesn't exceed - SIZE_MAX. - - WRITE is true for write accesses, READ is true for reads. Both are - false for simple size checks in calls to functions that neither read - from nor write to the region. - - When nonnull, PAD points to a more detailed description of the access. - - If the call is successfully verified as safe return true, otherwise - return false. */ - -bool -check_access (tree exp, tree dstwrite, - tree maxread, tree srcstr, tree dstsize, - access_mode mode, const access_data *pad /* = NULL */) -{ - /* The size of the largest object is half the address space, or - PTRDIFF_MAX. (This is way too permissive.) */ - tree maxobjsize = max_object_size (); - - /* Either an approximate/minimum the length of the source string for - string functions or the size of the source object for raw memory - functions. */ - tree slen = NULL_TREE; - - /* The range of the access in bytes; first set to the write access - for functions that write and then read for those that also (or - just) read. */ - tree range[2] = { NULL_TREE, NULL_TREE }; - - /* Set to true when the exact number of bytes written by a string - function like strcpy is not known and the only thing that is - known is that it must be at least one (for the terminating nul). */ - bool at_least_one = false; - if (srcstr) - { - /* SRCSTR is normally a pointer to string but as a special case - it can be an integer denoting the length of a string. */ - if (POINTER_TYPE_P (TREE_TYPE (srcstr))) - { - if (!check_nul_terminated_array (exp, srcstr, maxread)) - return false; - /* Try to determine the range of lengths the source string - refers to. If it can be determined and is less than - the upper bound given by MAXREAD add one to it for - the terminating nul. Otherwise, set it to one for - the same reason, or to MAXREAD as appropriate. */ - c_strlen_data lendata = { }; - get_range_strlen (srcstr, &lendata, /* eltsize = */ 1); - range[0] = lendata.minlen; - range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen; - if (range[0] - && TREE_CODE (range[0]) == INTEGER_CST - && TREE_CODE (range[1]) == INTEGER_CST - && (!maxread || TREE_CODE (maxread) == INTEGER_CST)) - { - if (maxread && tree_int_cst_le (maxread, range[0])) - range[0] = range[1] = maxread; - else - range[0] = fold_build2 (PLUS_EXPR, size_type_node, - range[0], size_one_node); - - if (maxread && tree_int_cst_le (maxread, range[1])) - range[1] = maxread; - else if (!integer_all_onesp (range[1])) - range[1] = fold_build2 (PLUS_EXPR, size_type_node, - range[1], size_one_node); - - slen = range[0]; - } - else - { - at_least_one = true; - slen = size_one_node; - } - } - else - slen = srcstr; - } - - if (!dstwrite && !maxread) - { - /* When the only available piece of data is the object size - there is nothing to do. */ - if (!slen) - return true; - - /* Otherwise, when the length of the source sequence is known - (as with strlen), set DSTWRITE to it. 
*/ - if (!range[0]) - dstwrite = slen; - } - - if (!dstsize) - dstsize = maxobjsize; - - /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG - if valid. */ - get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL); - - tree func = get_callee_fndecl (exp); - /* Read vs write access by built-ins can be determined from the const - qualifiers on the pointer argument. In the absence of attribute - access, non-const qualified pointer arguments to user-defined - functions are assumed to both read and write the objects. */ - const bool builtin = func ? fndecl_built_in_p (func) : false; - - /* First check the number of bytes to be written against the maximum - object size. */ - if (range[0] - && TREE_CODE (range[0]) == INTEGER_CST - && tree_int_cst_lt (maxobjsize, range[0])) - { - location_t loc = tree_inlined_location (exp); - maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range, - NULL_TREE, pad); - return false; - } - - /* The number of bytes to write is "exact" if DSTWRITE is non-null, - constant, and in range of unsigned HOST_WIDE_INT. */ - bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite); - - /* Next check the number of bytes to be written against the destination - object size. */ - if (range[0] || !exactwrite || integer_all_onesp (dstwrite)) - { - if (range[0] - && TREE_CODE (range[0]) == INTEGER_CST - && ((tree_fits_uhwi_p (dstsize) - && tree_int_cst_lt (dstsize, range[0])) - || (dstwrite - && tree_fits_uhwi_p (dstwrite) - && tree_int_cst_lt (dstwrite, range[0])))) - { - if (TREE_NO_WARNING (exp) - || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref))) - return false; - - location_t loc = tree_inlined_location (exp); - bool warned = false; - if (dstwrite == slen && at_least_one) - { - /* This is a call to strcpy with a destination of 0 size - and a source of unknown length. The call will write - at least one byte past the end of the destination. */ - warned = (func - ? warning_at (loc, OPT_Wstringop_overflow_, - "%K%qD writing %E or more bytes into " - "a region of size %E overflows " - "the destination", - exp, func, range[0], dstsize) - : warning_at (loc, OPT_Wstringop_overflow_, - "%Kwriting %E or more bytes into " - "a region of size %E overflows " - "the destination", - exp, range[0], dstsize)); - } - else - { - const bool read - = mode == access_read_only || mode == access_read_write; - const bool write - = mode == access_write_only || mode == access_read_write; - const bool maybe = pad && pad->dst.parmarray; - warned = warn_for_access (loc, func, exp, - OPT_Wstringop_overflow_, - range, dstsize, - write, read && !builtin, maybe); - } - - if (warned) - { - TREE_NO_WARNING (exp) = true; - if (pad) - pad->dst.inform_access (pad->mode); - } - - /* Return error when an overflow has been detected. */ - return false; - } - } - - /* Check the maximum length of the source sequence against the size - of the destination object if known, or against the maximum size - of an object. */ - if (maxread) - { - /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if - PAD is nonnull and BNDRNG is valid. */ - get_size_range (maxread, range, pad ? 
pad->src.bndrng : NULL); - - location_t loc = tree_inlined_location (exp); - tree size = dstsize; - if (pad && pad->mode == access_read_only) - size = wide_int_to_tree (sizetype, pad->src.sizrng[1]); - - if (range[0] && maxread && tree_fits_uhwi_p (size)) - { - if (tree_int_cst_lt (maxobjsize, range[0])) - { - maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func, - range, size, pad); - return false; - } - - if (size != maxobjsize && tree_int_cst_lt (size, range[0])) - { - int opt = (dstwrite || mode != access_read_only - ? OPT_Wstringop_overflow_ - : OPT_Wstringop_overread); - maybe_warn_for_bound (opt, loc, exp, func, range, size, pad); - return false; - } - } - - maybe_warn_nonstring_arg (func, exp); - } - - /* Check for reading past the end of SRC. */ - bool overread = (slen - && slen == srcstr - && dstwrite - && range[0] - && TREE_CODE (slen) == INTEGER_CST - && tree_int_cst_lt (slen, range[0])); - /* If none is determined try to get a better answer based on the details - in PAD. */ - if (!overread - && pad - && pad->src.sizrng[1] >= 0 - && pad->src.offrng[0] >= 0 - && (pad->src.offrng[1] < 0 - || pad->src.offrng[0] <= pad->src.offrng[1])) - { - /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if - PAD is nonnull and BNDRNG is valid. */ - get_size_range (maxread, range, pad ? pad->src.bndrng : NULL); - /* Set OVERREAD for reads starting just past the end of an object. */ - overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0]; - range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]); - slen = size_zero_node; - } - - if (overread) - { - if (TREE_NO_WARNING (exp) - || (srcstr && TREE_NO_WARNING (srcstr)) - || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref))) - return false; - - location_t loc = tree_inlined_location (exp); - const bool read - = mode == access_read_only || mode == access_read_write; - const bool maybe = pad && pad->dst.parmarray; - if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range, - slen, false, read, maybe)) - { - TREE_NO_WARNING (exp) = true; - if (pad) - pad->src.inform_access (access_read_only); - } - return false; - } - - return true; -} - -/* A convenience wrapper for check_access above to check access - by a read-only function like puts. */ - -static bool -check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */, - int ost /* = 1 */) -{ - if (!warn_stringop_overread) - return true; - - access_data data (exp, access_read_only, NULL_TREE, false, bound, true); - compute_objsize (src, ost, &data.src); - return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound, - /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode, - &data); -} - -/* If STMT is a call to an allocation function, returns the constant - maximum size of the object allocated by the call represented as - sizetype. If nonnull, sets RNG1[] to the range of the size. - When nonnull, uses RVALS for range information, otherwise calls - get_range_info to get it. - Returns null when STMT is not a call to a valid allocation function. 
*/ - -tree -gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */, - range_query * /* = NULL */) -{ - if (!stmt || !is_gimple_call (stmt)) - return NULL_TREE; - - tree allocfntype; - if (tree fndecl = gimple_call_fndecl (stmt)) - allocfntype = TREE_TYPE (fndecl); - else - allocfntype = gimple_call_fntype (stmt); - - if (!allocfntype) - return NULL_TREE; - - unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX; - tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype)); - if (!at) - { - if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)) - return NULL_TREE; - - argidx1 = 0; - } - - unsigned nargs = gimple_call_num_args (stmt); - - if (argidx1 == UINT_MAX) - { - tree atval = TREE_VALUE (at); - if (!atval) - return NULL_TREE; - - argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1; - if (nargs <= argidx1) - return NULL_TREE; - - atval = TREE_CHAIN (atval); - if (atval) - { - argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1; - if (nargs <= argidx2) - return NULL_TREE; - } - } - - tree size = gimple_call_arg (stmt, argidx1); - - wide_int rng1_buf[2]; - /* If RNG1 is not set, use the buffer. */ - if (!rng1) - rng1 = rng1_buf; - - /* Use maximum precision to avoid overflow below. */ - const int prec = ADDR_MAX_PRECISION; - - { - tree r[2]; - /* Determine the largest valid range size, including zero. */ - if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST)) - return NULL_TREE; - rng1[0] = wi::to_wide (r[0], prec); - rng1[1] = wi::to_wide (r[1], prec); - } - - if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST) - return fold_convert (sizetype, size); - - /* To handle ranges do the math in wide_int and return the product - of the upper bounds as a constant. Ignore anti-ranges. */ - tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node; - wide_int rng2[2]; - { - tree r[2]; - /* As above, use the full non-negative range on failure. */ - if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST)) - return NULL_TREE; - rng2[0] = wi::to_wide (r[0], prec); - rng2[1] = wi::to_wide (r[1], prec); - } - - /* Compute products of both bounds for the caller but return the lesser - of SIZE_MAX and the product of the upper bounds as a constant. */ - rng1[0] = rng1[0] * rng2[0]; - rng1[1] = rng1[1] * rng2[1]; - - const tree size_max = TYPE_MAX_VALUE (sizetype); - if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec))) - { - rng1[1] = wi::to_wide (size_max, prec); - return size_max; - } - - return wide_int_to_tree (sizetype, rng1[1]); -} - -/* For an access to an object referenced to by the function parameter PTR - of pointer type, and set RNG[] to the range of sizes of the object - obtainedfrom the attribute access specification for the current function. - Set STATIC_ARRAY if the array parameter has been declared [static]. - Return the function parameter on success and null otherwise. */ - -tree -gimple_parm_array_size (tree ptr, wide_int rng[2], - bool *static_array /* = NULL */) -{ - /* For a function argument try to determine the byte size of the array - from the current function declaratation (e.g., attribute access or - related). 
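/* [Editorial note] An illustration of the alloc_size handling above
   (hypothetical declaration): when attribute alloc_size names two
   arguments, the size of the returned object is derived from the
   product of their values or ranges, capped at SIZE_MAX.  */

__attribute__ ((malloc, alloc_size (1, 2)))
void *my_calloc (unsigned long nelem, unsigned long elsize);

void *
alloc_size_example (void)
{
  /* Known to allocate exactly 8 * 16 == 128 bytes, so writes of more
     than 128 bytes into the result can be diagnosed.  */
  return my_calloc (8, 16);
}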
*/ - tree var = SSA_NAME_VAR (ptr); - if (TREE_CODE (var) != PARM_DECL) - return NULL_TREE; - - const unsigned prec = TYPE_PRECISION (sizetype); - - rdwr_map rdwr_idx; - attr_access *access = get_parm_access (rdwr_idx, var); - if (!access) - return NULL_TREE; - - if (access->sizarg != UINT_MAX) - { - /* TODO: Try to extract the range from the argument based on - those of subsequent assertions or based on known calls to - the current function. */ - return NULL_TREE; - } - - if (!access->minsize) - return NULL_TREE; - - /* Only consider ordinary array bound at level 2 (or above if it's - ever added). */ - if (warn_array_parameter < 2 && !access->static_p) - return NULL_TREE; - - if (static_array) - *static_array = access->static_p; - - rng[0] = wi::zero (prec); - rng[1] = wi::uhwi (access->minsize, prec); - /* Multiply the array bound encoded in the attribute by the size - of what the pointer argument to which it decays points to. */ - tree eltype = TREE_TYPE (TREE_TYPE (ptr)); - tree size = TYPE_SIZE_UNIT (eltype); - if (!size || TREE_CODE (size) != INTEGER_CST) - return NULL_TREE; - - rng[1] *= wi::to_wide (size, prec); - return var; -} - -/* Wrapper around the wide_int overload of get_range that accepts - offset_int instead. For middle end expressions returns the same - result. For a subset of nonconstamt expressions emitted by the front - end determines a more precise range than would be possible otherwise. */ - -static bool -get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals) -{ - offset_int add = 0; - if (TREE_CODE (x) == PLUS_EXPR) - { - /* Handle constant offsets in pointer addition expressions seen - n the front end IL. */ - tree op = TREE_OPERAND (x, 1); - if (TREE_CODE (op) == INTEGER_CST) - { - op = fold_convert (signed_type_for (TREE_TYPE (op)), op); - add = wi::to_offset (op); - x = TREE_OPERAND (x, 0); - } - } - - if (TREE_CODE (x) == NOP_EXPR) - /* Also handle conversions to sizetype seen in the front end IL. */ - x = TREE_OPERAND (x, 0); - - tree type = TREE_TYPE (x); - if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) - return false; - - if (TREE_CODE (x) != INTEGER_CST - && TREE_CODE (x) != SSA_NAME) - { - if (TYPE_UNSIGNED (type) - && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype)) - type = signed_type_for (type); - - r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add; - r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add; - return x; - } - - wide_int wr[2]; - if (!get_range (x, stmt, wr, rvals)) - return false; - - signop sgn = SIGNED; - /* Only convert signed integers or unsigned sizetype to a signed - offset and avoid converting large positive values in narrower - types to negative offsets. */ - if (TYPE_UNSIGNED (type) - && wr[0].get_precision () < TYPE_PRECISION (sizetype)) - sgn = UNSIGNED; - - r[0] = offset_int::from (wr[0], sgn); - r[1] = offset_int::from (wr[1], sgn); - return true; -} - -/* Return the argument that the call STMT to a built-in function returns - or null if it doesn't. On success, set OFFRNG[] to the range of offsets - from the argument reflected in the value returned by the built-in if it - can be determined, otherwise to 0 and HWI_M1U respectively. 
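/* [Editorial note] An illustration of the array-parameter handling
   above (hypothetical functions): the bound of an array parameter,
   scaled by the element size, can yield a byte-size range for the
   object the parameter points to; with [static] the bound is
   additionally a guaranteed minimum in C.  */

void
take_array (int a[8])                 /* size range [0, 8 * sizeof (int)]  */
{
  (void) a;
}

void
take_static_array (int a[static 8])   /* at least 8 * sizeof (int) bytes  */
{
  (void) a;
}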
*/ - -static tree -gimple_call_return_array (gimple *stmt, offset_int offrng[2], - range_query *rvals) -{ - if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL) - || gimple_call_num_args (stmt) < 1) - return NULL_TREE; - - tree fn = gimple_call_fndecl (stmt); - switch (DECL_FUNCTION_CODE (fn)) - { - case BUILT_IN_MEMCPY: - case BUILT_IN_MEMCPY_CHK: - case BUILT_IN_MEMMOVE: - case BUILT_IN_MEMMOVE_CHK: - case BUILT_IN_MEMSET: - case BUILT_IN_STPCPY: - case BUILT_IN_STPCPY_CHK: - case BUILT_IN_STPNCPY: - case BUILT_IN_STPNCPY_CHK: - case BUILT_IN_STRCAT: - case BUILT_IN_STRCAT_CHK: - case BUILT_IN_STRCPY: - case BUILT_IN_STRCPY_CHK: - case BUILT_IN_STRNCAT: - case BUILT_IN_STRNCAT_CHK: - case BUILT_IN_STRNCPY: - case BUILT_IN_STRNCPY_CHK: - offrng[0] = offrng[1] = 0; - return gimple_call_arg (stmt, 0); - - case BUILT_IN_MEMPCPY: - case BUILT_IN_MEMPCPY_CHK: - { - tree off = gimple_call_arg (stmt, 2); - if (!get_offset_range (off, stmt, offrng, rvals)) - { - offrng[0] = 0; - offrng[1] = HOST_WIDE_INT_M1U; - } - return gimple_call_arg (stmt, 0); - } - - case BUILT_IN_MEMCHR: - { - tree off = gimple_call_arg (stmt, 2); - if (get_offset_range (off, stmt, offrng, rvals)) - offrng[0] = 0; - else - { - offrng[0] = 0; - offrng[1] = HOST_WIDE_INT_M1U; - } - return gimple_call_arg (stmt, 0); - } - - case BUILT_IN_STRCHR: - case BUILT_IN_STRRCHR: - case BUILT_IN_STRSTR: - { - offrng[0] = 0; - offrng[1] = HOST_WIDE_INT_M1U; - } - return gimple_call_arg (stmt, 0); - - default: - break; - } - - return NULL_TREE; -} - -/* A helper of compute_objsize_r() to determine the size from an assignment - statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. */ - -static bool -handle_min_max_size (gimple *stmt, int ostype, access_ref *pref, - ssa_name_limit_t &snlim, pointer_query *qry) -{ - tree_code code = gimple_assign_rhs_code (stmt); - - tree ptr = gimple_assign_rhs1 (stmt); - - /* In a valid MAX_/MIN_EXPR both operands must refer to the same array. - Determine the size/offset of each and use the one with more or less - space remaining, respectively. If either fails, use the information - determined from the other instead, adjusted up or down as appropriate - for the expression. */ - access_ref aref[2] = { *pref, *pref }; - if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry)) - { - aref[0].base0 = false; - aref[0].offrng[0] = aref[0].offrng[1] = 0; - aref[0].add_max_offset (); - aref[0].set_max_size_range (); - } - - ptr = gimple_assign_rhs2 (stmt); - if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry)) - { - aref[1].base0 = false; - aref[1].offrng[0] = aref[1].offrng[1] = 0; - aref[1].add_max_offset (); - aref[1].set_max_size_range (); - } - - if (!aref[0].ref && !aref[1].ref) - /* Fail if the identity of neither argument could be determined. */ - return false; - - bool i0 = false; - if (aref[0].ref && aref[0].base0) - { - if (aref[1].ref && aref[1].base0) - { - /* If the object referenced by both arguments has been determined - set *PREF to the one with more or less space remainng, whichever - is appopriate for CODE. - TODO: Indicate when the objects are distinct so it can be - diagnosed. */ - i0 = code == MAX_EXPR; - const bool i1 = !i0; - - if (aref[i0].size_remaining () < aref[i1].size_remaining ()) - *pref = aref[i1]; - else - *pref = aref[i0]; - return true; - } - - /* If only the object referenced by one of the arguments could be - determined, use it and... 
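/* [Editorial note] The return-value/offset relationships recorded by
   gimple_call_return_array above, shown on a few calls (sketch only;
   mempcpy is a GNU extension).  */

#define _GNU_SOURCE
#include <string.h>

void
return_array_example (char *d, const char *s, size_t n)
{
  char *p = memcpy (d, s, n);     /* p == d, offset 0  */
  char *q = mempcpy (d, s, n);    /* q == d + n  */
  char *r = memchr (d, 'x', n);   /* r is null or in [d, d + n)  */
  (void) p; (void) q; (void) r;
}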
*/ - *pref = aref[0]; - i0 = true; - } - else - *pref = aref[1]; - - const bool i1 = !i0; - /* ...see if the offset obtained from the other pointer can be used - to tighten up the bound on the offset obtained from the first. */ - if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0]) - || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1])) - { - pref->offrng[0] = aref[i0].offrng[0]; - pref->offrng[1] = aref[i0].offrng[1]; - } - return true; -} - -/* A helper of compute_objsize_r() to determine the size from ARRAY_REF - AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true - on success and false on failure. */ - -static bool -handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref, - ssa_name_limit_t &snlim, pointer_query *qry) -{ - gcc_assert (TREE_CODE (aref) == ARRAY_REF); - - ++pref->deref; - - tree arefop = TREE_OPERAND (aref, 0); - tree reftype = TREE_TYPE (arefop); - if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE) - /* Avoid arrays of pointers. FIXME: Hande pointers to arrays - of known bound. */ - return false; - - if (!compute_objsize_r (arefop, ostype, pref, snlim, qry)) - return false; - - offset_int orng[2]; - tree off = pref->eval (TREE_OPERAND (aref, 1)); - range_query *const rvals = qry ? qry->rvals : NULL; - if (!get_offset_range (off, NULL, orng, rvals)) - { - /* Set ORNG to the maximum offset representable in ptrdiff_t. */ - orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); - orng[0] = -orng[1] - 1; - } - - /* Convert the array index range determined above to a byte - offset. */ - tree lowbnd = array_ref_low_bound (aref); - if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd)) - { - /* Adjust the index by the low bound of the array domain - (normally zero but 1 in Fortran). */ - unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd); - orng[0] -= lb; - orng[1] -= lb; - } - - tree eltype = TREE_TYPE (aref); - tree tpsize = TYPE_SIZE_UNIT (eltype); - if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST) - { - pref->add_max_offset (); - return true; - } - - offset_int sz = wi::to_offset (tpsize); - orng[0] *= sz; - orng[1] *= sz; - - if (ostype && TREE_CODE (eltype) == ARRAY_TYPE) - { - /* Except for the permissive raw memory functions which use - the size of the whole object determined above, use the size - of the referenced array. Because the overall offset is from - the beginning of the complete array object add this overall - offset to the size of array. */ - offset_int sizrng[2] = - { - pref->offrng[0] + orng[0] + sz, - pref->offrng[1] + orng[1] + sz - }; - if (sizrng[1] < sizrng[0]) - std::swap (sizrng[0], sizrng[1]); - if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0]) - pref->sizrng[0] = sizrng[0]; - if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1]) - pref->sizrng[1] = sizrng[1]; - } - - pref->add_offset (orng[0], orng[1]); - return true; -} - -/* A helper of compute_objsize_r() to determine the size from MEM_REF - MREF. Return true on success and false on failure. */ - -static bool -handle_mem_ref (tree mref, int ostype, access_ref *pref, - ssa_name_limit_t &snlim, pointer_query *qry) -{ - gcc_assert (TREE_CODE (mref) == MEM_REF); - - ++pref->deref; - - if (VECTOR_TYPE_P (TREE_TYPE (mref))) - { - /* Hack: Give up for MEM_REFs of vector types; those may be - synthesized from multiple assignments to consecutive data - members (see PR 93200 and 96963). 
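/* [Editorial note] An illustration of the index-to-byte-offset
   conversion in handle_array_ref above (hypothetical function,
   assuming a 2-byte short): an index known to be in [2, 5] scales to
   a byte-offset range of [4, 10] from the start of the array.  */

short offsets[8];

short *
array_ref_example (int i)
{
  if (i < 2 || i > 5)
    i = 2;
  return &offsets[i];
}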
- FIXME: Vectorized assignments should only be present after - vectorization so this hack is only necessary after it has - run and could be avoided in calls from prior passes (e.g., - tree-ssa-strlen.c). - FIXME: Deal with this more generally, e.g., by marking up - such MEM_REFs at the time they're created. */ - return false; - } - - tree mrefop = TREE_OPERAND (mref, 0); - if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry)) - return false; - - offset_int orng[2]; - tree off = pref->eval (TREE_OPERAND (mref, 1)); - range_query *const rvals = qry ? qry->rvals : NULL; - if (!get_offset_range (off, NULL, orng, rvals)) - { - /* Set ORNG to the maximum offset representable in ptrdiff_t. */ - orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); - orng[0] = -orng[1] - 1; - } - - pref->add_offset (orng[0], orng[1]); - return true; -} - -/* Helper to compute the size of the object referenced by the PTR - expression which must have pointer type, using Object Size type - OSTYPE (only the least significant 2 bits are used). - On success, sets PREF->REF to the DECL of the referenced object - if it's unique, otherwise to null, PREF->OFFRNG to the range of - offsets into it, and PREF->SIZRNG to the range of sizes of - the object(s). - SNLIM is used to avoid visiting the same PHI operand multiple - times, and, when nonnull, RVALS to determine range information. - Returns true on success, false when a meaningful size (or range) - cannot be determined. - - The function is intended for diagnostics and should not be used - to influence code generation or optimization. */ - -static bool -compute_objsize_r (tree ptr, int ostype, access_ref *pref, - ssa_name_limit_t &snlim, pointer_query *qry) -{ - STRIP_NOPS (ptr); - - const bool addr = TREE_CODE (ptr) == ADDR_EXPR; - if (addr) - { - --pref->deref; - ptr = TREE_OPERAND (ptr, 0); - } - - if (DECL_P (ptr)) - { - pref->ref = ptr; - - if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr))) - { - /* Set the maximum size if the reference is to the pointer - itself (as opposed to what it points to). */ - pref->set_max_size_range (); - return true; - } - - if (tree size = decl_init_size (ptr, false)) - if (TREE_CODE (size) == INTEGER_CST) - { - pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size); - return true; - } - - pref->set_max_size_range (); - return true; - } - - const tree_code code = TREE_CODE (ptr); - range_query *const rvals = qry ? qry->rvals : NULL; - - if (code == BIT_FIELD_REF) - { - tree ref = TREE_OPERAND (ptr, 0); - if (!compute_objsize_r (ref, ostype, pref, snlim, qry)) - return false; - - offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2))); - pref->add_offset (off / BITS_PER_UNIT); - return true; - } - - if (code == COMPONENT_REF) - { - tree ref = TREE_OPERAND (ptr, 0); - if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE) - /* In accesses through union types consider the entire unions - rather than just their members. */ - ostype = 0; - tree field = TREE_OPERAND (ptr, 1); - - if (ostype == 0) - { - /* In OSTYPE zero (for raw memory functions like memcpy), use - the maximum size instead if the identity of the enclosing - object cannot be determined. */ - if (!compute_objsize_r (ref, ostype, pref, snlim, qry)) - return false; - - /* Otherwise, use the size of the enclosing object and add - the offset of the member to the offset computed so far. 
*/ - tree offset = byte_position (field); - if (TREE_CODE (offset) == INTEGER_CST) - pref->add_offset (wi::to_offset (offset)); - else - pref->add_max_offset (); - - if (!pref->ref) - /* REF may have been already set to an SSA_NAME earlier - to provide better context for diagnostics. In that case, - leave it unchanged. */ - pref->ref = ref; - return true; - } - - pref->ref = field; - - if (!addr && POINTER_TYPE_P (TREE_TYPE (field))) - { - /* Set maximum size if the reference is to the pointer member - itself (as opposed to what it points to). */ - pref->set_max_size_range (); - return true; - } - - /* SAM is set for array members that might need special treatment. */ - special_array_member sam; - tree size = component_ref_size (ptr, &sam); - if (sam == special_array_member::int_0) - pref->sizrng[0] = pref->sizrng[1] = 0; - else if (!pref->trail1special && sam == special_array_member::trail_1) - pref->sizrng[0] = pref->sizrng[1] = 1; - else if (size && TREE_CODE (size) == INTEGER_CST) - pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size); - else - { - /* When the size of the member is unknown it's either a flexible - array member or a trailing special array member (either zero - length or one-element). Set the size to the maximum minus - the constant size of the type. */ - pref->sizrng[0] = 0; - pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); - if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref))) - if (TREE_CODE (recsize) == INTEGER_CST) - pref->sizrng[1] -= wi::to_offset (recsize); - } - return true; - } - - if (code == ARRAY_REF) - return handle_array_ref (ptr, addr, ostype, pref, snlim, qry); - - if (code == MEM_REF) - return handle_mem_ref (ptr, ostype, pref, snlim, qry); - - if (code == TARGET_MEM_REF) - { - tree ref = TREE_OPERAND (ptr, 0); - if (!compute_objsize_r (ref, ostype, pref, snlim, qry)) - return false; - - /* TODO: Handle remaining operands. Until then, add maximum offset. */ - pref->ref = ptr; - pref->add_max_offset (); - return true; - } - - if (code == INTEGER_CST) - { - /* Pointer constants other than null are most likely the result - of erroneous null pointer addition/subtraction. Set size to - zero. For null pointers, set size to the maximum for now - since those may be the result of jump threading. */ - if (integer_zerop (ptr)) - pref->set_max_size_range (); - else - pref->sizrng[0] = pref->sizrng[1] = 0; - pref->ref = ptr; - - return true; - } - - if (code == STRING_CST) - { - pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr); - pref->ref = ptr; - return true; - } - - if (code == POINTER_PLUS_EXPR) - { - tree ref = TREE_OPERAND (ptr, 0); - if (!compute_objsize_r (ref, ostype, pref, snlim, qry)) - return false; - - /* Clear DEREF since the offset is being applied to the target - of the dereference. */ - pref->deref = 0; - - offset_int orng[2]; - tree off = pref->eval (TREE_OPERAND (ptr, 1)); - if (get_offset_range (off, NULL, orng, rvals)) - pref->add_offset (orng[0], orng[1]); - else - pref->add_max_offset (); - return true; - } - - if (code == VIEW_CONVERT_EXPR) - { - ptr = TREE_OPERAND (ptr, 0); - return compute_objsize_r (ptr, ostype, pref, snlim, qry); - } - - if (code == SSA_NAME) - { - if (!snlim.next ()) - return false; - - /* Only process an SSA_NAME if the recursion limit has not yet - been reached. */ - if (qry) - { - if (++qry->depth) - qry->max_depth = qry->depth; - if (const access_ref *cache_ref = qry->get_ref (ptr)) - { - /* If the pointer is in the cache set *PREF to what it refers - to and return success. 
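/* [Editorial note] An illustration of the unknown-size trailing member
   case above (hypothetical type): for the flexible array member DATA
   the size range becomes [0, PTRDIFF_MAX minus the constant size of
   the enclosing struct].  */

struct flex
{
  int n;
  char data[];   /* flexible array member: no constant size  */
};

char *
member_example (struct flex *p)
{
  return p->data;
}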
*/ - *pref = *cache_ref; - return true; - } - } - - gimple *stmt = SSA_NAME_DEF_STMT (ptr); - if (is_gimple_call (stmt)) - { - /* If STMT is a call to an allocation function get the size - from its argument(s). If successful, also set *PREF->REF - to PTR for the caller to include in diagnostics. */ - wide_int wr[2]; - if (gimple_call_alloc_size (stmt, wr, rvals)) - { - pref->ref = ptr; - pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED); - pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED); - /* Constrain both bounds to a valid size. */ - offset_int maxsize = wi::to_offset (max_object_size ()); - if (pref->sizrng[0] > maxsize) - pref->sizrng[0] = maxsize; - if (pref->sizrng[1] > maxsize) - pref->sizrng[1] = maxsize; - } - else - { - /* For functions known to return one of their pointer arguments - try to determine what the returned pointer points to, and on - success add OFFRNG which was set to the offset added by - the function (e.g., memchr) to the overall offset. */ - offset_int offrng[2]; - if (tree ret = gimple_call_return_array (stmt, offrng, rvals)) - { - if (!compute_objsize_r (ret, ostype, pref, snlim, qry)) - return false; - - /* Cap OFFRNG[1] to at most the remaining size of - the object. */ - offset_int remrng[2]; - remrng[1] = pref->size_remaining (remrng); - if (remrng[1] < offrng[1]) - offrng[1] = remrng[1]; - pref->add_offset (offrng[0], offrng[1]); - } - else - { - /* For other calls that might return arbitrary pointers - including into the middle of objects set the size - range to maximum, clear PREF->BASE0, and also set - PREF->REF to include in diagnostics. */ - pref->set_max_size_range (); - pref->base0 = false; - pref->ref = ptr; - } - } - qry->put_ref (ptr, *pref); - return true; - } - - if (gimple_nop_p (stmt)) - { - /* For a function argument try to determine the byte size - of the array from the current function declaratation - (e.g., attribute access or related). */ - wide_int wr[2]; - bool static_array = false; - if (tree ref = gimple_parm_array_size (ptr, wr, &static_array)) - { - pref->parmarray = !static_array; - pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED); - pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED); - pref->ref = ref; - qry->put_ref (ptr, *pref); - return true; - } - - pref->set_max_size_range (); - pref->base0 = false; - pref->ref = ptr; - qry->put_ref (ptr, *pref); - return true; - } - - if (gimple_code (stmt) == GIMPLE_PHI) - { - pref->ref = ptr; - access_ref phi_ref = *pref; - if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry)) - return false; - *pref = phi_ref; - pref->ref = ptr; - qry->put_ref (ptr, *pref); - return true; - } - - if (!is_gimple_assign (stmt)) - { - /* Clear BASE0 since the assigned pointer might point into - the middle of the object, set the maximum size range and, - if the SSA_NAME refers to a function argumnent, set - PREF->REF to it. */ - pref->base0 = false; - pref->set_max_size_range (); - pref->ref = ptr; - return true; - } - - tree_code code = gimple_assign_rhs_code (stmt); - - if (code == MAX_EXPR || code == MIN_EXPR) - { - if (!handle_min_max_size (stmt, ostype, pref, snlim, qry)) - return false; - qry->put_ref (ptr, *pref); - return true; - } - - tree rhs = gimple_assign_rhs1 (stmt); - - if (code == POINTER_PLUS_EXPR - && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE) - { - /* Compute the size of the object first. 
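/* [Editorial note] A sketch of the kind of IL the GIMPLE_PHI case
   above resolves through access_ref::get_ref (hypothetical function):
   a pointer defined by a PHI selecting between objects of different
   sizes.  */

char small_buf[4];
char big_buf[16];

char *
phi_example (int cond)
{
  /* With optimization, P is typically an SSA_NAME defined by a PHI
     of &small_buf and &big_buf.  */
  char *p = cond ? small_buf : big_buf;
  return p;
}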
*/ - if (!compute_objsize_r (rhs, ostype, pref, snlim, qry)) - return false; - - offset_int orng[2]; - tree off = gimple_assign_rhs2 (stmt); - if (get_offset_range (off, stmt, orng, rvals)) - pref->add_offset (orng[0], orng[1]); - else - pref->add_max_offset (); - qry->put_ref (ptr, *pref); - return true; - } - - if (code == ADDR_EXPR - || code == SSA_NAME) - return compute_objsize_r (rhs, ostype, pref, snlim, qry); - - /* (This could also be an assignment from a nonlocal pointer.) Save - PTR to mention in diagnostics but otherwise treat it as a pointer - to an unknown object. */ - pref->ref = rhs; - pref->base0 = false; - pref->set_max_size_range (); - return true; - } - - /* Assume all other expressions point into an unknown object - of the maximum valid size. */ - pref->ref = ptr; - pref->base0 = false; - pref->set_max_size_range (); - if (TREE_CODE (ptr) == SSA_NAME) - qry->put_ref (ptr, *pref); - return true; -} - -/* A "public" wrapper around the above. Clients should use this overload - instead. */ - -tree -compute_objsize (tree ptr, int ostype, access_ref *pref, - range_query *rvals /* = NULL */) -{ - pointer_query qry; - qry.rvals = rvals; - ssa_name_limit_t snlim; - if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry)) - return NULL_TREE; - - offset_int maxsize = pref->size_remaining (); - if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0) - pref->offrng[0] = 0; - return wide_int_to_tree (sizetype, maxsize); -} - -/* Transitional wrapper. The function should be removed once callers - transition to the pointer_query API. */ - -tree -compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry) -{ - pointer_query qry; - if (ptr_qry) - ptr_qry->depth = 0; - else - ptr_qry = &qry; - - ssa_name_limit_t snlim; - if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry)) - return NULL_TREE; - - offset_int maxsize = pref->size_remaining (); - if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0) - pref->offrng[0] = 0; - return wide_int_to_tree (sizetype, maxsize); -} - -/* Legacy wrapper around the above. The function should be removed - once callers transition to one of the two above. */ - -tree -compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */, - tree *poff /* = NULL */, range_query *rvals /* = NULL */) -{ - /* Set the initial offsets to zero and size to negative to indicate - none has been computed yet. */ - access_ref ref; - tree size = compute_objsize (ptr, ostype, &ref, rvals); - if (!size || !ref.base0) - return NULL_TREE; - - if (pdecl) - *pdecl = ref.ref; - - if (poff) - *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]); - - return size; -} - -/* Helper to determine and check the sizes of the source and the destination - of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the - call expression, DEST is the destination argument, SRC is the source - argument or null, and LEN is the number of bytes. Use Object Size type-0 - regardless of the OPT_Wstringop_overflow_ setting. Return true on success - (no overflow or invalid sizes), false otherwise. */ - -static bool -check_memop_access (tree exp, tree dest, tree src, tree size) -{ - /* For functions like memset and memcpy that operate on raw memory - try to determine the size of the largest source and destination - object using type-0 Object Size regardless of the object size - type specified by the option. */ - access_data data (exp, access_read_write); - tree srcsize = src ? 
compute_objsize (src, 0, &data.src) : NULL_TREE; - tree dstsize = compute_objsize (dest, 0, &data.dst); - - return check_access (exp, size, /*maxread=*/NULL_TREE, - srcsize, dstsize, data.mode, &data); -} - -/* Validate memchr arguments without performing any expansion. - Return NULL_RTX. */ - -static rtx -expand_builtin_memchr (tree exp, rtx) -{ - if (!validate_arglist (exp, - POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)) - return NULL_RTX; - - tree arg1 = CALL_EXPR_ARG (exp, 0); - tree len = CALL_EXPR_ARG (exp, 2); - - check_read_access (exp, arg1, len, 0); - - return NULL_RTX; -} - /* Expand a call EXP to the memcpy builtin. Return NULL_RTX if we failed, the caller should emit a normal call, otherwise try to get the result in TARGET, if convenient (and in @@ -5901,8 +3206,6 @@ expand_builtin_memcpy (tree exp, rtx target) tree src = CALL_EXPR_ARG (exp, 1); tree len = CALL_EXPR_ARG (exp, 2); - check_memop_access (exp, dest, src, len); - return expand_builtin_memory_copy_args (dest, src, len, target, exp, /*retmode=*/ RETURN_BEGIN, false); } @@ -5921,8 +3224,6 @@ expand_builtin_memmove (tree exp, rtx target) tree src = CALL_EXPR_ARG (exp, 1); tree len = CALL_EXPR_ARG (exp, 2); - check_memop_access (exp, dest, src, len); - return expand_builtin_memory_copy_args (dest, src, len, target, exp, /*retmode=*/ RETURN_BEGIN, true); } @@ -5959,8 +3260,6 @@ expand_builtin_mempcpy (tree exp, rtx target) /* Avoid expanding mempcpy into memcpy when the call is determined to overflow the buffer. This also prevents the same overflow from being diagnosed again when expanding memcpy. */ - if (!check_memop_access (exp, dest, src, len)) - return NULL_RTX; return expand_builtin_mempcpy_args (dest, src, len, target, exp, /*retmode=*/ RETURN_END); @@ -6136,36 +3435,6 @@ expand_movstr (tree dest, tree src, rtx target, memop_ret retmode) return target; } -/* Do some very basic size validation of a call to the strcpy builtin - given by EXP. Return NULL_RTX to have the built-in expand to a call - to the library function. */ - -static rtx -expand_builtin_strcat (tree exp) -{ - if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE) - || !warn_stringop_overflow) - return NULL_RTX; - - tree dest = CALL_EXPR_ARG (exp, 0); - tree src = CALL_EXPR_ARG (exp, 1); - - /* There is no way here to determine the length of the string in - the destination to which the SRC string is being appended so - just diagnose cases when the souce string is longer than - the destination object. */ - access_data data (exp, access_read_write, NULL_TREE, true, - NULL_TREE, true); - const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1; - compute_objsize (src, ost, &data.src); - tree destsize = compute_objsize (dest, ost, &data.dst); - - check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE, - src, destsize, data.mode, &data); - - return NULL_RTX; -} - /* Expand expression EXP, which is a call to the strcpy builtin. Return NULL_RTX if we failed the caller should emit a normal call, otherwise try to get the result in TARGET, if convenient (and in mode MODE if that's @@ -6180,29 +3449,7 @@ expand_builtin_strcpy (tree exp, rtx target) tree dest = CALL_EXPR_ARG (exp, 0); tree src = CALL_EXPR_ARG (exp, 1); - if (warn_stringop_overflow) - { - access_data data (exp, access_read_write, NULL_TREE, true, - NULL_TREE, true); - const int ost = warn_stringop_overflow ? 
warn_stringop_overflow - 1 : 1; - compute_objsize (src, ost, &data.src); - tree dstsize = compute_objsize (dest, ost, &data.dst); - check_access (exp, /*dstwrite=*/ NULL_TREE, - /*maxread=*/ NULL_TREE, /*srcstr=*/ src, - dstsize, data.mode, &data); - } - - if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target)) - { - /* Check to see if the argument was declared attribute nonstring - and if so, issue a warning since at this point it's not known - to be nul-terminated. */ - tree fndecl = get_callee_fndecl (exp); - maybe_warn_nonstring_arg (fndecl, exp); - return ret; - } - - return NULL_RTX; + return expand_builtin_strcpy_args (exp, dest, src, target); } /* Helper function to do the actual work for expand_builtin_strcpy. The @@ -6212,19 +3459,8 @@ expand_builtin_strcpy (tree exp, rtx target) expand_builtin_strcpy. */ static rtx -expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target) +expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target) { - /* Detect strcpy calls with unterminated arrays.. */ - tree size; - bool exact; - if (tree nonstr = unterminated_array (src, &size, &exact)) - { - /* NONSTR refers to the non-nul terminated constant array. */ - warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr, - size, exact); - return NULL_RTX; - } - return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN); } @@ -6245,15 +3481,6 @@ expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode) dst = CALL_EXPR_ARG (exp, 0); src = CALL_EXPR_ARG (exp, 1); - if (warn_stringop_overflow) - { - access_data data (exp, access_read_write); - tree destsize = compute_objsize (dst, warn_stringop_overflow - 1, - &data.dst); - check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE, - src, destsize, data.mode, &data); - } - /* If return value is ignored, transform stpcpy into strcpy. */ if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY)) { @@ -6276,9 +3503,6 @@ expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode) return expand_movstr (dst, src, target, /*retmode=*/ RETURN_END_MINUS_ONE); - if (lendata.decl) - warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl); - lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1)); ret = expand_builtin_mempcpy_args (dst, src, lenp1, target, exp, @@ -6340,44 +3564,22 @@ expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode) return NULL_RTX; } -/* Check a call EXP to the stpncpy built-in for validity. - Return NULL_RTX on both success and failure. */ - -static rtx -expand_builtin_stpncpy (tree exp, rtx) -{ - if (!validate_arglist (exp, - POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE) - || !warn_stringop_overflow) - return NULL_RTX; - - /* The source and destination of the call. */ - tree dest = CALL_EXPR_ARG (exp, 0); - tree src = CALL_EXPR_ARG (exp, 1); - - /* The exact number of bytes to write (not the maximum). */ - tree len = CALL_EXPR_ARG (exp, 2); - access_data data (exp, access_read_write); - /* The size of the destination object. */ - tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst); - check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data); - return NULL_RTX; -} - /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE) bytes from constant string DATA + OFFSET and return it as target constant. 
*/ rtx -builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset, - scalar_int_mode mode) +builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset, + fixed_size_mode mode) { const char *str = (const char *) data; if ((unsigned HOST_WIDE_INT) offset > strlen (str)) return const0_rtx; - return c_readstr (str + offset, mode); + /* The by-pieces infrastructure does not try to pick a vector mode + for strncpy expansion. */ + return c_readstr (str + offset, as_a <scalar_int_mode> (mode)); } /* Helper to check the sizes of sequences and the destination of calls @@ -6420,10 +3622,10 @@ check_strncat_sizes (tree exp, tree objsize) if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize) && tree_int_cst_equal (objsize, maxread)) { - location_t loc = tree_inlined_location (exp); + location_t loc = EXPR_LOCATION (exp); warning_at (loc, OPT_Wstringop_overflow_, - "%K%qD specified bound %E equals destination size", - exp, get_callee_fndecl (exp), maxread); + "%qD specified bound %E equals destination size", + get_callee_fndecl (exp), maxread); return false; } @@ -6440,78 +3642,6 @@ check_strncat_sizes (tree exp, tree objsize) objsize, data.mode, &data); } -/* Similar to expand_builtin_strcat, do some very basic size validation - of a call to the strcpy builtin given by EXP. Return NULL_RTX to have - the built-in expand to a call to the library function. */ - -static rtx -expand_builtin_strncat (tree exp, rtx) -{ - if (!validate_arglist (exp, - POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE) - || !warn_stringop_overflow) - return NULL_RTX; - - tree dest = CALL_EXPR_ARG (exp, 0); - tree src = CALL_EXPR_ARG (exp, 1); - /* The upper bound on the number of bytes to write. */ - tree maxread = CALL_EXPR_ARG (exp, 2); - - /* Detect unterminated source (only). */ - if (!check_nul_terminated_array (exp, src, maxread)) - return NULL_RTX; - - /* The length of the source sequence. */ - tree slen = c_strlen (src, 1); - - /* Try to determine the range of lengths that the source expression - refers to. Since the lengths are only used for warning and not - for code generation disable strict mode below. */ - tree maxlen = slen; - if (!maxlen) - { - c_strlen_data lendata = { }; - get_range_strlen (src, &lendata, /* eltsize = */ 1); - maxlen = lendata.maxbound; - } - - access_data data (exp, access_read_write); - /* Try to verify that the destination is big enough for the shortest - string. First try to determine the size of the destination object - into which the source is being copied. */ - tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst); - - /* Add one for the terminating nul. */ - tree srclen = (maxlen - ? fold_build2 (PLUS_EXPR, size_type_node, maxlen, - size_one_node) - : NULL_TREE); - - /* The strncat function copies at most MAXREAD bytes and always appends - the terminating nul so the specified upper bound should never be equal - to (or greater than) the size of the destination. 
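/* [Editorial note] An example triggering the "specified bound equals
   destination size" diagnostic shown above: strncat always appends a
   terminating nul, so a bound equal to the destination size can never
   be safe.  Hypothetical names, for illustration only.  */

#include <string.h>

char cat_dst[8];

void
strncat_bound_example (const char *s)
{
  strncat (cat_dst, s, sizeof cat_dst);   /* bound 8 == destination size  */
}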
*/ - if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize) - && tree_int_cst_equal (destsize, maxread)) - { - location_t loc = tree_inlined_location (exp); - warning_at (loc, OPT_Wstringop_overflow_, - "%K%qD specified bound %E equals destination size", - exp, get_callee_fndecl (exp), maxread); - - return NULL_RTX; - } - - if (!srclen - || (maxread && tree_fits_uhwi_p (maxread) - && tree_fits_uhwi_p (srclen) - && tree_int_cst_lt (maxread, srclen))) - srclen = maxread; - - check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen, - destsize, data.mode, &data); - return NULL_RTX; -} - /* Expand expression EXP, which is a call to the strncpy builtin. Return NULL_RTX if we failed the caller should emit a normal call. */ @@ -6531,18 +3661,6 @@ expand_builtin_strncpy (tree exp, rtx target) /* The length of the source sequence. */ tree slen = c_strlen (src, 1); - if (warn_stringop_overflow) - { - access_data data (exp, access_read_write, len, true, len, true); - const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1; - compute_objsize (src, ost, &data.src); - tree dstsize = compute_objsize (dest, ost, &data.dst); - /* The number of bytes to write is LEN but check_access will also - check SLEN if LEN's value isn't known. */ - check_access (exp, /*dstwrite=*/len, - /*maxread=*/len, src, dstsize, data.mode, &data); - } - /* We must be passed a constant len and src parameter. */ if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen)) return NULL_RTX; @@ -6578,30 +3696,134 @@ expand_builtin_strncpy (tree exp, rtx target) return NULL_RTX; } +/* Return the RTL of a register in MODE generated from PREV in the + previous iteration. */ + +static rtx +gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode) +{ + rtx target = nullptr; + if (prev != nullptr && prev->data != nullptr) + { + /* Use the previous data in the same mode. */ + if (prev->mode == mode) + return prev->data; + + fixed_size_mode prev_mode = prev->mode; + + /* Don't use the previous data to write QImode if it is in a + vector mode. */ + if (VECTOR_MODE_P (prev_mode) && mode == QImode) + return target; + + rtx prev_rtx = prev->data; + + if (REG_P (prev_rtx) + && HARD_REGISTER_P (prev_rtx) + && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0) + { + /* This case occurs when PREV_MODE is a vector and when + MODE is too small to store using vector operations. + After register allocation, the code will need to move the + lowpart of the vector register into a non-vector register. + + Also, the target has chosen to use a hard register + instead of going with the default choice of using a + pseudo register. We should respect that choice and try to + avoid creating a pseudo register with the same mode as the + current hard register. + + In principle, we could just use a lowpart MODE subreg of + the vector register. However, the vector register mode might + be too wide for non-vector registers, and we already know + that the non-vector mode is too small for vector registers. + It's therefore likely that we'd need to spill to memory in + the vector mode and reload the non-vector value from there. + + Try to avoid that by reducing the vector register to the + smallest size that it can hold. This should increase the + chances that non-vector registers can hold both the inner + and outer modes of the subreg that we generate later. 
*/ + machine_mode m; + fixed_size_mode candidate; + FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode)) + if (is_a<fixed_size_mode> (m, &candidate)) + { + if (GET_MODE_SIZE (candidate) + >= GET_MODE_SIZE (prev_mode)) + break; + if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode) + && lowpart_subreg_regno (REGNO (prev_rtx), + prev_mode, candidate) >= 0) + { + target = lowpart_subreg (candidate, prev_rtx, + prev_mode); + prev_rtx = target; + prev_mode = candidate; + break; + } + } + if (target == nullptr) + prev_rtx = copy_to_reg (prev_rtx); + } + + target = lowpart_subreg (mode, prev_rtx, prev_mode); + } + return target; +} + /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE) bytes from constant string DATA + OFFSET and return it as target - constant. */ + constant. If PREV isn't nullptr, it has the RTL info from the + previous iteration. */ rtx -builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED, - scalar_int_mode mode) +builtin_memset_read_str (void *data, void *prev, + HOST_WIDE_INT offset ATTRIBUTE_UNUSED, + fixed_size_mode mode) { const char *c = (const char *) data; - char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode)); + unsigned int size = GET_MODE_SIZE (mode); + + rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev, + mode); + if (target != nullptr) + return target; + rtx src = gen_int_mode (*c, QImode); + + if (VECTOR_MODE_P (mode)) + { + gcc_assert (GET_MODE_INNER (mode) == QImode); + + rtx const_vec = gen_const_vec_duplicate (mode, src); + if (prev == NULL) + /* Return CONST_VECTOR when called by a query function. */ + return const_vec; + + /* Use the move expander with CONST_VECTOR. */ + target = targetm.gen_memset_scratch_rtx (mode); + emit_move_insn (target, const_vec); + return target; + } - memset (p, *c, GET_MODE_SIZE (mode)); + char *p = XALLOCAVEC (char, size); - return c_readstr (p, mode); + memset (p, *c, size); + + /* Vector modes should be handled above. */ + return c_readstr (p, as_a <scalar_int_mode> (mode)); } /* Callback routine for store_by_pieces. Return the RTL of a register containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned char value given in the RTL register data. For example, if mode is - 4 bytes wide, return the RTL for 0x01010101*data. */ + 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't + nullptr, it has the RTL info from the previous iteration. */ static rtx -builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED, - scalar_int_mode mode) +builtin_memset_gen_str (void *data, void *prev, + HOST_WIDE_INT offset ATTRIBUTE_UNUSED, + fixed_size_mode mode) { rtx target, coeff; size_t size; @@ -6611,9 +3833,33 @@ builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED, if (size == 1) return (rtx) data; + target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode); + if (target != nullptr) + return target; + + if (VECTOR_MODE_P (mode)) + { + gcc_assert (GET_MODE_INNER (mode) == QImode); + + /* vec_duplicate_optab is a precondition to pick a vector mode for + the memset expander. 
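/* [Editorial note] An illustration of the value construction described
   above for a non-constant fill byte: in a 4-byte scalar mode the
   stored word is the byte scaled by 0x01010101 (e.g., 0xAB becomes
   0xABABABAB); in a QImode vector mode the byte is duplicated across
   the vector instead.  Whether store_by_pieces is used, and with which
   mode, depends on the target.  */

#include <string.h>

void
memset_fill_example (unsigned char *p, int c)
{
  memset (p, c, 32);   /* candidate for store_by_pieces expansion  */
}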
*/ + insn_code icode = optab_handler (vec_duplicate_optab, mode); + + target = targetm.gen_memset_scratch_rtx (mode); + class expand_operand ops[2]; + create_output_operand (&ops[0], target, mode); + create_input_operand (&ops[1], (rtx) data, QImode); + expand_insn (icode, 2, ops); + if (!rtx_equal_p (target, ops[0].value)) + emit_move_insn (target, ops[0].value); + + return target; + } + p = XALLOCAVEC (char, size); memset (p, 1, size); - coeff = c_readstr (p, mode); + /* Vector modes should be handled above. */ + coeff = c_readstr (p, as_a <scalar_int_mode> (mode)); target = convert_to_mode (mode, (rtx) data, 1); target = expand_mult (mode, target, coeff, NULL_RTX, 1); @@ -6625,7 +3871,7 @@ builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED, try to get the result in TARGET, if convenient (and in mode MODE if that's convenient). */ -static rtx +rtx expand_builtin_memset (tree exp, rtx target, machine_mode mode) { if (!validate_arglist (exp, @@ -6636,11 +3882,171 @@ expand_builtin_memset (tree exp, rtx target, machine_mode mode) tree val = CALL_EXPR_ARG (exp, 1); tree len = CALL_EXPR_ARG (exp, 2); - check_memop_access (exp, dest, NULL_TREE, len); - return expand_builtin_memset_args (dest, val, len, target, mode, exp); } +/* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO. + Return TRUE if successful, FALSE otherwise. TO is assumed to be + aligned at an ALIGN-bits boundary. LEN must be a multiple of + 1<<CTZ_LEN between MIN_LEN and MAX_LEN. + + The strategy is to issue one store_by_pieces for each power of two, + from most to least significant, guarded by a test on whether there + are at least that many bytes left to copy in LEN. + + ??? Should we skip some powers of two in favor of loops? Maybe start + at the max of TO/LEN/word alignment, at least when optimizing for + size, instead of ensuring O(log len) dynamic compares? */ + +bool +try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len, + unsigned HOST_WIDE_INT min_len, + unsigned HOST_WIDE_INT max_len, + rtx val, char valc, unsigned int align) +{ + int max_bits = floor_log2 (max_len); + int min_bits = floor_log2 (min_len); + int sctz_len = ctz_len; + + gcc_checking_assert (sctz_len >= 0); + + if (val) + valc = 1; + + /* Bits more significant than TST_BITS are part of the shared prefix + in the binary representation of both min_len and max_len. Since + they're identical, we don't need to test them in the loop. */ + int tst_bits = (max_bits != min_bits ? max_bits + : floor_log2 (max_len ^ min_len)); + + /* Check whether it's profitable to start by storing a fixed BLKSIZE + bytes, to lower max_bits. In the unlikely case of a constant LEN + (implied by identical MAX_LEN and MIN_LEN), we want to issue a + single store_by_pieces, but otherwise, select the minimum multiple + of the ALIGN (in bytes) and of the MCD of the possible LENs, that + brings MAX_LEN below TST_BITS, if that's lower than min_len. */ + unsigned HOST_WIDE_INT blksize; + if (max_len > min_len) + { + unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len, + align / BITS_PER_UNIT); + blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng; + blksize &= ~(alrng - 1); + } + else if (max_len == min_len) + blksize = max_len; + else + gcc_unreachable (); + if (min_len >= blksize) + { + min_len -= blksize; + min_bits = floor_log2 (min_len); + max_len -= blksize; + max_bits = floor_log2 (max_len); + + tst_bits = (max_bits != min_bits ? 
max_bits + : floor_log2 (max_len ^ min_len)); + } + else + blksize = 0; + + /* Check that we can use store by pieces for the maximum store count + we may issue (initial fixed-size block, plus conditional + power-of-two-sized from max_bits to ctz_len. */ + unsigned HOST_WIDE_INT xlenest = blksize; + if (max_bits >= 0) + xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2 + - (HOST_WIDE_INT_1U << ctz_len)); + if (!can_store_by_pieces (xlenest, builtin_memset_read_str, + &valc, align, true)) + return false; + + by_pieces_constfn constfun; + void *constfundata; + if (val) + { + constfun = builtin_memset_gen_str; + constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node), + val); + } + else + { + constfun = builtin_memset_read_str; + constfundata = &valc; + } + + rtx ptr = copy_addr_to_reg (convert_to_mode (ptr_mode, XEXP (to, 0), 0)); + rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0)); + to = replace_equiv_address (to, ptr); + set_mem_align (to, align); + + if (blksize) + { + to = store_by_pieces (to, blksize, + constfun, constfundata, + align, true, + max_len != 0 ? RETURN_END : RETURN_BEGIN); + if (max_len == 0) + return true; + + /* Adjust PTR, TO and REM. Since TO's address is likely + PTR+offset, we have to replace it. */ + emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX)); + to = replace_equiv_address (to, ptr); + rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize); + emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX)); + } + + /* Iterate over power-of-two block sizes from the maximum length to + the least significant bit possibly set in the length. */ + for (int i = max_bits; i >= sctz_len; i--) + { + rtx_code_label *label = NULL; + blksize = HOST_WIDE_INT_1U << i; + + /* If we're past the bits shared between min_ and max_len, expand + a test on the dynamic length, comparing it with the + BLKSIZE. */ + if (i <= tst_bits) + { + label = gen_label_rtx (); + emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL, + ptr_mode, 1, label, + profile_probability::even ()); + } + /* If we are at a bit that is in the prefix shared by min_ and + max_len, skip this BLKSIZE if the bit is clear. */ + else if ((max_len & blksize) == 0) + continue; + + /* Issue a store of BLKSIZE bytes. */ + to = store_by_pieces (to, blksize, + constfun, constfundata, + align, true, + i != sctz_len ? RETURN_END : RETURN_BEGIN); + + /* Adjust REM and PTR, unless this is the last iteration. */ + if (i != sctz_len) + { + emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX)); + to = replace_equiv_address (to, ptr); + rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize); + emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX)); + } + + if (label) + { + emit_label (label); + + /* Given conditional stores, the offset can no longer be + known, so clear it. */ + clear_mem_offset (to); + } + } + + return true; +} + /* Helper function to do the actual work for expand_builtin_memset. The arguments to the builtin_memset call DEST, VAL, and LEN are broken out so that this can also be called without constructing an actual CALL_EXPR. 
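/* [Editorial note] A C-level sketch (not RTL, hypothetical function) of
   the control flow try_store_by_multiple_pieces above emits for a
   memset whose length is known to be a multiple of 8 in [8, 31]: one
   store_by_pieces block per power of two from the maximum length down
   to 1 << ctz_len, each guarded by a test on the remaining length.  */

static void
store_by_multiple_pieces_sketch (char *ptr, unsigned long rem, int c)
{
  if (rem >= 16)                  /* guarded 16-byte block  */
    {
      __builtin_memset (ptr, c, 16);
      ptr += 16;
      rem -= 16;
    }
  if (rem >= 8)                   /* guarded 8-byte block  */
    __builtin_memset (ptr, c, 8);
}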
@@ -6695,7 +4101,8 @@ expand_builtin_memset_args (tree dest, tree val, tree len, dest_mem = get_memory_rtx (dest, len); val_mode = TYPE_MODE (unsigned_char_type_node); - if (TREE_CODE (val) != INTEGER_CST) + if (TREE_CODE (val) != INTEGER_CST + || target_char_cast (val, &c)) { rtx val_rtx; @@ -6719,7 +4126,12 @@ expand_builtin_memset_args (tree dest, tree val, tree len, else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx, dest_align, expected_align, expected_size, min_size, max_size, - probable_max_size)) + probable_max_size) + && !try_store_by_multiple_pieces (dest_mem, len_rtx, + tree_ctz (len), + min_size, max_size, + val_rtx, 0, + dest_align)) goto do_libcall; dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX); @@ -6727,9 +4139,6 @@ expand_builtin_memset_args (tree dest, tree val, tree len, return dest_mem; } - if (target_char_cast (val, &c)) - goto do_libcall; - if (c) { if (tree_fits_uhwi_p (len) @@ -6743,7 +4152,12 @@ expand_builtin_memset_args (tree dest, tree val, tree len, gen_int_mode (c, val_mode), dest_align, expected_align, expected_size, min_size, max_size, - probable_max_size)) + probable_max_size) + && !try_store_by_multiple_pieces (dest_mem, len_rtx, + tree_ctz (len), + min_size, max_size, + NULL_RTX, c, + dest_align)) goto do_libcall; dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX); @@ -6757,7 +4171,7 @@ expand_builtin_memset_args (tree dest, tree val, tree len, ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL, expected_align, expected_size, min_size, max_size, - probable_max_size); + probable_max_size, tree_ctz (len)); if (dest_addr == 0) { @@ -6795,8 +4209,6 @@ expand_builtin_bzero (tree exp) tree dest = CALL_EXPR_ARG (exp, 0); tree size = CALL_EXPR_ARG (exp, 1); - check_memop_access (exp, dest, NULL_TREE, size); - /* New argument list transforming bzero(ptr x, int y) to memset(ptr x, int 0, size_t y). This is done this way so that if it isn't expanded inline, we fallback to @@ -6947,10 +4359,6 @@ expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target) tree arg1 = CALL_EXPR_ARG (exp, 0); tree arg2 = CALL_EXPR_ARG (exp, 1); - if (!check_read_access (exp, arg1) - || !check_read_access (exp, arg2)) - return NULL_RTX; - /* Due to the performance benefit, always inline the calls first. */ rtx result = NULL_RTX; result = inline_expand_builtin_bytecmp (exp, target); @@ -7032,11 +4440,6 @@ expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target) tree fndecl = get_callee_fndecl (exp); if (result) { - /* Check to see if the argument was declared attribute nonstring - and if so, issue a warning since at this point it's not known - to be nul-terminated. */ - maybe_warn_nonstring_arg (fndecl, exp); - /* Return the value in the proper mode for this function. */ machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); if (GET_MODE (result) == mode) @@ -7050,6 +4453,7 @@ expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target) /* Expand the library call ourselves using a stabilized argument list to avoid re-evaluating the function's arguments twice. 
*/ tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2); + copy_warning (fn, exp); gcc_assert (TREE_CODE (fn) == CALL_EXPR); CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp); return expand_call (fn, target, target == const0_rtx); @@ -7071,66 +4475,10 @@ expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target, tree arg2 = CALL_EXPR_ARG (exp, 1); tree arg3 = CALL_EXPR_ARG (exp, 2); - if (!check_nul_terminated_array (exp, arg1, arg3) - || !check_nul_terminated_array (exp, arg2, arg3)) - return NULL_RTX; - - location_t loc = tree_inlined_location (exp); + location_t loc = EXPR_LOCATION (exp); tree len1 = c_strlen (arg1, 1); tree len2 = c_strlen (arg2, 1); - if (!len1 || !len2) - { - /* Check to see if the argument was declared attribute nonstring - and if so, issue a warning since at this point it's not known - to be nul-terminated. */ - if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp) - && !len1 && !len2) - { - /* A strncmp read is constrained not just by the bound but - also by the length of the shorter string. Specifying - a bound that's larger than the size of either array makes - no sense and is likely a bug. When the length of neither - of the two strings is known but the sizes of both of - the arrays they are stored in is, issue a warning if - the bound is larger than than the size of the larger - of the two arrays. */ - - access_ref ref1 (arg3, true); - access_ref ref2 (arg3, true); - - tree bndrng[2] = { NULL_TREE, NULL_TREE }; - get_size_range (arg3, bndrng, ref1.bndrng); - - tree size1 = compute_objsize (arg1, 1, &ref1); - tree size2 = compute_objsize (arg2, 1, &ref2); - tree func = get_callee_fndecl (exp); - - if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0])) - { - offset_int rem1 = ref1.size_remaining (); - offset_int rem2 = ref2.size_remaining (); - if (rem1 == 0 || rem2 == 0) - maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func, - bndrng, integer_zero_node); - else - { - offset_int maxrem = wi::max (rem1, rem2, UNSIGNED); - if (maxrem < wi::to_offset (bndrng[0])) - maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, - func, bndrng, - wide_int_to_tree (sizetype, maxrem)); - } - } - else if (bndrng[0] - && !integer_zerop (bndrng[0]) - && ((size1 && integer_zerop (size1)) - || (size2 && integer_zerop (size2)))) - maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func, - bndrng, integer_zero_node); - } - } - /* Due to the performance benefit, always inline the calls first. */ rtx result = NULL_RTX; result = inline_expand_builtin_bytecmp (exp, target); @@ -7213,8 +4561,7 @@ expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target, /* Expand the library call ourselves using a stabilized argument list to avoid re-evaluating the function's arguments twice. 
*/ tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len); - if (TREE_NO_WARNING (exp)) - TREE_NO_WARNING (call) = true; + copy_warning (call, exp); gcc_assert (TREE_CODE (call) == CALL_EXPR); CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp); return expand_call (call, target, target == const0_rtx); @@ -7547,25 +4894,6 @@ expand_builtin_alloca (tree exp) if (!valid_arglist) return NULL_RTX; - if ((alloca_for_var - && warn_vla_limit >= HOST_WIDE_INT_MAX - && warn_alloc_size_limit < warn_vla_limit) - || (!alloca_for_var - && warn_alloca_limit >= HOST_WIDE_INT_MAX - && warn_alloc_size_limit < warn_alloca_limit - )) - { - /* -Walloca-larger-than and -Wvla-larger-than settings of - less than HOST_WIDE_INT_MAX override the more general - -Walloc-size-larger-than so unless either of the former - options is smaller than the last one (wchich would imply - that the call was already checked), check the alloca - arguments for overflow. */ - tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE }; - int idx[] = { 0, -1 }; - maybe_warn_alloc_args_overflow (fndecl, exp, args, idx); - } - /* Compute the argument. */ op0 = expand_normal (CALL_EXPR_ARG (exp, 0)); @@ -9752,13 +7080,13 @@ expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode, case BUILT_IN_VA_ARG_PACK: /* All valid uses of __builtin_va_arg_pack () are removed during inlining. */ - error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp); + error ("invalid use of %<__builtin_va_arg_pack ()%>"); return const0_rtx; case BUILT_IN_VA_ARG_PACK_LEN: /* All valid uses of __builtin_va_arg_pack_len () are removed during inlining. */ - error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp); + error ("invalid use of %<__builtin_va_arg_pack_len ()%>"); return const0_rtx; /* Return the address of the first anonymous stack arg. 
*/ @@ -9870,63 +7198,12 @@ expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode, return target; break; - case BUILT_IN_STRCAT: - target = expand_builtin_strcat (exp); - if (target) - return target; - break; - - case BUILT_IN_GETTEXT: - case BUILT_IN_PUTS: - case BUILT_IN_PUTS_UNLOCKED: - case BUILT_IN_STRDUP: - if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE)) - check_read_access (exp, CALL_EXPR_ARG (exp, 0)); - break; - - case BUILT_IN_INDEX: - case BUILT_IN_RINDEX: - case BUILT_IN_STRCHR: - case BUILT_IN_STRRCHR: - if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) - check_read_access (exp, CALL_EXPR_ARG (exp, 0)); - break; - - case BUILT_IN_FPUTS: - case BUILT_IN_FPUTS_UNLOCKED: - if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) - check_read_access (exp, CALL_EXPR_ARG (exp, 0)); - break; - - case BUILT_IN_STRNDUP: - if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) - check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1)); - break; - - case BUILT_IN_STRCASECMP: - case BUILT_IN_STRPBRK: - case BUILT_IN_STRSPN: - case BUILT_IN_STRCSPN: - case BUILT_IN_STRSTR: - if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) - { - check_read_access (exp, CALL_EXPR_ARG (exp, 0)); - check_read_access (exp, CALL_EXPR_ARG (exp, 1)); - } - break; - case BUILT_IN_STRCPY: target = expand_builtin_strcpy (exp, target); if (target) return target; break; - case BUILT_IN_STRNCAT: - target = expand_builtin_strncat (exp, target); - if (target) - return target; - break; - case BUILT_IN_STRNCPY: target = expand_builtin_strncpy (exp, target); if (target) @@ -9939,18 +7216,6 @@ expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode, return target; break; - case BUILT_IN_STPNCPY: - target = expand_builtin_stpncpy (exp, target); - if (target) - return target; - break; - - case BUILT_IN_MEMCHR: - target = expand_builtin_memchr (exp, target); - if (target) - return target; - break; - case BUILT_IN_MEMCPY: target = expand_builtin_memcpy (exp, target); if (target) @@ -9982,7 +7247,7 @@ expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode, break; /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it - back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater + back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter when changing it to a strcmp call. */ case BUILT_IN_STRCMP_EQ: target = expand_builtin_memcmp (exp, target, true); @@ -10952,8 +8217,11 @@ fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg) if (len) return fold_convert_loc (loc, type, len); + /* TODO: Move this to gimple-ssa-warn-access once the pass runs + also early enough to detect invalid reads in multimensional + arrays and struct members. 
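The deleted cases above existed only to call check_read_access on string arguments, and the new TODO suggests such checks now belong in the gimple-ssa-warn-access pass. A rough user-level illustration of what they catch; the array is deliberately not nul-terminated, and GCC 11+ may report the read via -Wstringop-overread.

#include <cstring>

const char arr[] = { 'a', 'b', 'c' };   /* no terminating nul */

std::size_t
oops ()
{
  /* strlen has to scan past the end of 'arr' looking for a nul, which
     the read-access checking is expected to flag.  */
  return std::strlen (arr);
}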
*/ if (!lendata.decl) - c_strlen (arg, 1, &lendata); + c_strlen (arg, 1, &lendata); if (lendata.decl) { @@ -12707,8 +9975,8 @@ expand_builtin_object_size (tree exp) if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) { - error ("%Kfirst argument of %qD must be a pointer, second integer constant", - exp, fndecl); + error ("first argument of %qD must be a pointer, second integer constant", + fndecl); expand_builtin_trap (); return const0_rtx; } @@ -12720,8 +9988,8 @@ expand_builtin_object_size (tree exp) || tree_int_cst_sgn (ost) < 0 || compare_tree_int (ost, 3) > 0) { - error ("%Klast argument of %qD is not integer constant between 0 and 3", - exp, fndecl); + error ("last argument of %qD is not integer constant between 0 and 3", + fndecl); expand_builtin_trap (); return const0_rtx; } @@ -12982,705 +10250,6 @@ maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode) access_write_only); } -/* Return true if STMT is a call to an allocation function. Unless - ALL_ALLOC is set, consider only functions that return dynmamically - allocated objects. Otherwise return true even for all forms of - alloca (including VLA). */ - -static bool -fndecl_alloc_p (tree fndecl, bool all_alloc) -{ - if (!fndecl) - return false; - - /* A call to operator new isn't recognized as one to a built-in. */ - if (DECL_IS_OPERATOR_NEW_P (fndecl)) - return true; - - if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)) - { - switch (DECL_FUNCTION_CODE (fndecl)) - { - case BUILT_IN_ALLOCA: - case BUILT_IN_ALLOCA_WITH_ALIGN: - return all_alloc; - case BUILT_IN_ALIGNED_ALLOC: - case BUILT_IN_CALLOC: - case BUILT_IN_GOMP_ALLOC: - case BUILT_IN_MALLOC: - case BUILT_IN_REALLOC: - case BUILT_IN_STRDUP: - case BUILT_IN_STRNDUP: - return true; - default: - break; - } - } - - /* A function is considered an allocation function if it's declared - with attribute malloc with an argument naming its associated - deallocation function. */ - tree attrs = DECL_ATTRIBUTES (fndecl); - if (!attrs) - return false; - - for (tree allocs = attrs; - (allocs = lookup_attribute ("malloc", allocs)); - allocs = TREE_CHAIN (allocs)) - { - tree args = TREE_VALUE (allocs); - if (!args) - continue; - - if (TREE_VALUE (args)) - return true; - } - - return false; -} - -/* Return true if STMT is a call to an allocation function. A wrapper - around fndecl_alloc_p. */ - -static bool -gimple_call_alloc_p (gimple *stmt, bool all_alloc = false) -{ - return fndecl_alloc_p (gimple_call_fndecl (stmt), all_alloc); -} - -/* Return the zero-based number corresponding to the argument being - deallocated if STMT is a call to a deallocation function or UINT_MAX - if it isn't. */ - -static unsigned -call_dealloc_argno (tree exp) -{ - tree fndecl = get_callee_fndecl (exp); - if (!fndecl) - return UINT_MAX; - - return fndecl_dealloc_argno (fndecl); -} - -/* Return the zero-based number corresponding to the argument being - deallocated if FNDECL is a deallocation function or UINT_MAX - if it isn't. */ - -unsigned -fndecl_dealloc_argno (tree fndecl) -{ - /* A call to operator delete isn't recognized as one to a built-in. */ - if (DECL_IS_OPERATOR_DELETE_P (fndecl)) - return 0; - - /* TODO: Handle user-defined functions with attribute malloc? Handle - known non-built-ins like fopen? 
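The removed fndecl_alloc_p treated a function as an allocator if it is a recognized built-in, an operator new, or carries attribute malloc with an argument naming its deallocator. A sketch of that attribute form with invented names, assuming the GCC 11 attribute-malloc-with-argument syntax:

struct buffer;

void buf_release (buffer *);

/* Plain attribute malloc only says the result does not alias existing
   storage; the form with an argument additionally names the matching
   deallocation function, which is what the removed code looked for.  */
__attribute__ ((malloc, malloc (buf_release)))
buffer *buf_acquire (unsigned size);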
*/ - if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)) - { - switch (DECL_FUNCTION_CODE (fndecl)) - { - case BUILT_IN_FREE: - case BUILT_IN_REALLOC: - return 0; - default: - break; - } - return UINT_MAX; - } - - tree attrs = DECL_ATTRIBUTES (fndecl); - if (!attrs) - return UINT_MAX; - - for (tree atfree = attrs; - (atfree = lookup_attribute ("*dealloc", atfree)); - atfree = TREE_CHAIN (atfree)) - { - tree alloc = TREE_VALUE (atfree); - if (!alloc) - continue; - - tree pos = TREE_CHAIN (alloc); - if (!pos) - return 0; - - pos = TREE_VALUE (pos); - return TREE_INT_CST_LOW (pos) - 1; - } - - return UINT_MAX; -} - -/* Return true if DELC doesn't refer to an operator delete that's - suitable to call with a pointer returned from the operator new - described by NEWC. */ - -static bool -new_delete_mismatch_p (const demangle_component &newc, - const demangle_component &delc) -{ - if (newc.type != delc.type) - return true; - - switch (newc.type) - { - case DEMANGLE_COMPONENT_NAME: - { - int len = newc.u.s_name.len; - const char *news = newc.u.s_name.s; - const char *dels = delc.u.s_name.s; - if (len != delc.u.s_name.len || memcmp (news, dels, len)) - return true; - - if (news[len] == 'n') - { - if (news[len + 1] == 'a') - return dels[len] != 'd' || dels[len + 1] != 'a'; - if (news[len + 1] == 'w') - return dels[len] != 'd' || dels[len + 1] != 'l'; - } - return false; - } - - case DEMANGLE_COMPONENT_OPERATOR: - /* Operator mismatches are handled above. */ - return false; - - case DEMANGLE_COMPONENT_EXTENDED_OPERATOR: - if (newc.u.s_extended_operator.args != delc.u.s_extended_operator.args) - return true; - return new_delete_mismatch_p (*newc.u.s_extended_operator.name, - *delc.u.s_extended_operator.name); - - case DEMANGLE_COMPONENT_FIXED_TYPE: - if (newc.u.s_fixed.accum != delc.u.s_fixed.accum - || newc.u.s_fixed.sat != delc.u.s_fixed.sat) - return true; - return new_delete_mismatch_p (*newc.u.s_fixed.length, - *delc.u.s_fixed.length); - - case DEMANGLE_COMPONENT_CTOR: - if (newc.u.s_ctor.kind != delc.u.s_ctor.kind) - return true; - return new_delete_mismatch_p (*newc.u.s_ctor.name, - *delc.u.s_ctor.name); - - case DEMANGLE_COMPONENT_DTOR: - if (newc.u.s_dtor.kind != delc.u.s_dtor.kind) - return true; - return new_delete_mismatch_p (*newc.u.s_dtor.name, - *delc.u.s_dtor.name); - - case DEMANGLE_COMPONENT_BUILTIN_TYPE: - { - /* The demangler API provides no better way to compare built-in - types except to by comparing their demangled names. 
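fndecl_dealloc_argno converts the 1-based pointer position recorded by the attribute into the zero-based argument index used internally. A small illustration with invented names:

struct pool;

/* The pointer being released is the second argument of pool_free, so
   the attribute records position 2 and fndecl_dealloc_argno would
   return 1.  */
void pool_free (pool *owner, void *ptr);

__attribute__ ((malloc, malloc (pool_free, 2)))
void *pool_alloc (pool *owner, unsigned size);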
*/ - size_t nsz, dsz; - demangle_component *pnc = const_cast<demangle_component *>(&newc); - demangle_component *pdc = const_cast<demangle_component *>(&delc); - char *nts = cplus_demangle_print (0, pnc, 16, &nsz); - char *dts = cplus_demangle_print (0, pdc, 16, &dsz); - if (!nts != !dts) - return true; - bool mismatch = strcmp (nts, dts); - free (nts); - free (dts); - return mismatch; - } - - case DEMANGLE_COMPONENT_SUB_STD: - if (newc.u.s_string.len != delc.u.s_string.len) - return true; - return memcmp (newc.u.s_string.string, delc.u.s_string.string, - newc.u.s_string.len); - - case DEMANGLE_COMPONENT_FUNCTION_PARAM: - case DEMANGLE_COMPONENT_TEMPLATE_PARAM: - return newc.u.s_number.number != delc.u.s_number.number; - - case DEMANGLE_COMPONENT_CHARACTER: - return newc.u.s_character.character != delc.u.s_character.character; - - case DEMANGLE_COMPONENT_DEFAULT_ARG: - case DEMANGLE_COMPONENT_LAMBDA: - if (newc.u.s_unary_num.num != delc.u.s_unary_num.num) - return true; - return new_delete_mismatch_p (*newc.u.s_unary_num.sub, - *delc.u.s_unary_num.sub); - default: - break; - } - - if (!newc.u.s_binary.left != !delc.u.s_binary.left) - return true; - - if (!newc.u.s_binary.left) - return false; - - if (new_delete_mismatch_p (*newc.u.s_binary.left, *delc.u.s_binary.left) - || !newc.u.s_binary.right != !delc.u.s_binary.right) - return true; - - if (newc.u.s_binary.right) - return new_delete_mismatch_p (*newc.u.s_binary.right, - *delc.u.s_binary.right); - return false; -} - -/* Return true if DELETE_DECL is an operator delete that's not suitable - to call with a pointer returned fron NEW_DECL. */ - -static bool -new_delete_mismatch_p (tree new_decl, tree delete_decl) -{ - tree new_name = DECL_ASSEMBLER_NAME (new_decl); - tree delete_name = DECL_ASSEMBLER_NAME (delete_decl); - - /* valid_new_delete_pair_p() returns a conservative result (currently - it only handles global operators). A true result is reliable but - a false result doesn't necessarily mean the operators don't match. */ - if (valid_new_delete_pair_p (new_name, delete_name)) - return false; - - /* For anything not handled by valid_new_delete_pair_p() such as member - operators compare the individual demangled components of the mangled - name. */ - const char *new_str = IDENTIFIER_POINTER (new_name); - const char *del_str = IDENTIFIER_POINTER (delete_name); - - void *np = NULL, *dp = NULL; - demangle_component *ndc = cplus_demangle_v3_components (new_str, 0, &np); - demangle_component *ddc = cplus_demangle_v3_components (del_str, 0, &dp); - bool mismatch = new_delete_mismatch_p (*ndc, *ddc); - free (np); - free (dp); - return mismatch; -} - -/* ALLOC_DECL and DEALLOC_DECL are pair of allocation and deallocation - functions. Return true if the latter is suitable to deallocate objects - allocated by calls to the former. */ - -static bool -matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl) -{ - /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with - a built-in deallocator. */ - enum class alloc_kind_t { none, builtin, user } - alloc_dealloc_kind = alloc_kind_t::none; - - if (DECL_IS_OPERATOR_NEW_P (alloc_decl)) - { - if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)) - /* Return true iff both functions are of the same array or - singleton form and false otherwise. */ - return !new_delete_mismatch_p (alloc_decl, dealloc_decl); - - /* Return false for deallocation functions that are known not - to match. 
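new_delete_mismatch_p compares the demangled components of the two operators' mangled names so that scalar, array, and member forms are told apart. The classic case it is meant to catch, which g++ 11 reports under -Wmismatched-new-delete:

struct S { int i; };

void
scalar_vs_array ()
{
  S *p = new S[4];
  delete p;        /* expected warning: allocated with new[], so this
                      should be 'delete[] p'  */
}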
*/ - if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE) - || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC)) - return false; - /* Otherwise proceed below to check the deallocation function's - "*dealloc" attributes to look for one that mentions this operator - new. */ - } - else if (fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL)) - { - switch (DECL_FUNCTION_CODE (alloc_decl)) - { - case BUILT_IN_ALLOCA: - case BUILT_IN_ALLOCA_WITH_ALIGN: - return false; - - case BUILT_IN_ALIGNED_ALLOC: - case BUILT_IN_CALLOC: - case BUILT_IN_GOMP_ALLOC: - case BUILT_IN_MALLOC: - case BUILT_IN_REALLOC: - case BUILT_IN_STRDUP: - case BUILT_IN_STRNDUP: - if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)) - return false; - - if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE) - || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC)) - return true; - - alloc_dealloc_kind = alloc_kind_t::builtin; - break; - - default: - break; - } - } - - /* Set if DEALLOC_DECL both allocates and deallocates. */ - alloc_kind_t realloc_kind = alloc_kind_t::none; - - if (fndecl_built_in_p (dealloc_decl, BUILT_IN_NORMAL)) - { - built_in_function dealloc_code = DECL_FUNCTION_CODE (dealloc_decl); - if (dealloc_code == BUILT_IN_REALLOC) - realloc_kind = alloc_kind_t::builtin; - - for (tree amats = DECL_ATTRIBUTES (alloc_decl); - (amats = lookup_attribute ("malloc", amats)); - amats = TREE_CHAIN (amats)) - { - tree args = TREE_VALUE (amats); - if (!args) - continue; - - tree fndecl = TREE_VALUE (args); - if (!fndecl || !DECL_P (fndecl)) - continue; - - if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL) - && dealloc_code == DECL_FUNCTION_CODE (fndecl)) - return true; - } - } - - const bool alloc_builtin = fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL); - alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none; - - /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list - of its associated allocation functions for ALLOC_DECL. - If the corresponding ALLOC_DECL is found they're a matching pair, - otherwise they're not. - With DDATS set to the Deallocator's *Dealloc ATtributes... */ - for (tree ddats = DECL_ATTRIBUTES (dealloc_decl); - (ddats = lookup_attribute ("*dealloc", ddats)); - ddats = TREE_CHAIN (ddats)) - { - tree args = TREE_VALUE (ddats); - if (!args) - continue; - - tree alloc = TREE_VALUE (args); - if (!alloc) - continue; - - if (alloc == DECL_NAME (dealloc_decl)) - realloc_kind = alloc_kind_t::user; - - if (DECL_P (alloc)) - { - gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL)); - - switch (DECL_FUNCTION_CODE (alloc)) - { - case BUILT_IN_ALIGNED_ALLOC: - case BUILT_IN_CALLOC: - case BUILT_IN_GOMP_ALLOC: - case BUILT_IN_MALLOC: - case BUILT_IN_REALLOC: - case BUILT_IN_STRDUP: - case BUILT_IN_STRNDUP: - realloc_dealloc_kind = alloc_kind_t::builtin; - break; - default: - break; - } - - if (!alloc_builtin) - continue; - - if (DECL_FUNCTION_CODE (alloc) != DECL_FUNCTION_CODE (alloc_decl)) - continue; - - return true; - } - - if (alloc == DECL_NAME (alloc_decl)) - return true; - } - - if (realloc_kind == alloc_kind_t::none) - return false; - - hash_set<tree> common_deallocs; - /* Special handling for deallocators. Iterate over both the allocator's - and the reallocator's associated deallocator functions looking for - the first one in common. If one is found, the de/reallocator is - a match for the allocator even though the latter isn't directly - associated with the former. This simplifies declarations in system - headers. 
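Per the logic above, operator new never matches free or realloc, and the malloc-family built-ins never match an operator delete. A minimal example of the first pairing, which g++ 11 flags with -Wmismatched-new-delete:

#include <cstdlib>

void
bad_pairing ()
{
  int *p = new int (0);
  std::free (p);   /* expected warning: p came from operator new,
                      which free cannot release  */
}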
- With AMATS set to the Allocator's Malloc ATtributes, - and RMATS set to Reallocator's Malloc ATtributes... */ - for (tree amats = DECL_ATTRIBUTES (alloc_decl), - rmats = DECL_ATTRIBUTES (dealloc_decl); - (amats = lookup_attribute ("malloc", amats)) - || (rmats = lookup_attribute ("malloc", rmats)); - amats = amats ? TREE_CHAIN (amats) : NULL_TREE, - rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE) - { - if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE) - if (tree adealloc = TREE_VALUE (args)) - { - if (DECL_P (adealloc) - && fndecl_built_in_p (adealloc, BUILT_IN_NORMAL)) - { - built_in_function fncode = DECL_FUNCTION_CODE (adealloc); - if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC) - { - if (realloc_kind == alloc_kind_t::builtin) - return true; - alloc_dealloc_kind = alloc_kind_t::builtin; - } - continue; - } - - common_deallocs.add (adealloc); - } - - if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE) - if (tree ddealloc = TREE_VALUE (args)) - { - if (DECL_P (ddealloc) - && fndecl_built_in_p (ddealloc, BUILT_IN_NORMAL)) - { - built_in_function fncode = DECL_FUNCTION_CODE (ddealloc); - if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC) - { - if (alloc_dealloc_kind == alloc_kind_t::builtin) - return true; - realloc_dealloc_kind = alloc_kind_t::builtin; - } - continue; - } - - if (common_deallocs.add (ddealloc)) - return true; - } - } - - /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share - a built-in deallocator. */ - return (alloc_dealloc_kind == alloc_kind_t::builtin - && realloc_dealloc_kind == alloc_kind_t::builtin); -} - -/* Return true if DEALLOC_DECL is a function suitable to deallocate - objectes allocated by the ALLOC call. */ - -static bool -matching_alloc_calls_p (gimple *alloc, tree dealloc_decl) -{ - tree alloc_decl = gimple_call_fndecl (alloc); - if (!alloc_decl) - return true; - - return matching_alloc_calls_p (alloc_decl, dealloc_decl); -} - -/* Diagnose a call EXP to deallocate a pointer referenced by AREF if it - includes a nonzero offset. Such a pointer cannot refer to the beginning - of an allocated object. A negative offset may refer to it only if - the target pointer is unknown. */ - -static bool -warn_dealloc_offset (location_t loc, tree exp, const access_ref &aref) -{ - if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0) - return false; - - tree dealloc_decl = get_callee_fndecl (exp); - if (!dealloc_decl) - return false; - - if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl) - && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl)) - { - /* A call to a user-defined operator delete with a pointer plus offset - may be valid if it's returned from an unknown function (i.e., one - that's not operator new). 
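warn_dealloc_offset fires when the deallocated pointer provably carries a positive offset into the object, so it cannot point to the start of an allocation. A sketch of the kind of call it diagnoses, assuming GCC 11 and -Wfree-nonheap-object:

#include <cstdlib>

void
shift_and_free ()
{
  char *p = static_cast<char *> (std::malloc (16));
  if (!p)
    return;
  std::free (p + 4);   /* expected warning: pointer 'p' with nonzero
                          offset 4  */
}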
*/ - if (TREE_CODE (aref.ref) == SSA_NAME) - { - gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref); - if (is_gimple_call (def_stmt)) - { - tree alloc_decl = gimple_call_fndecl (def_stmt); - if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl)) - return false; - } - } - } - - char offstr[80]; - offstr[0] = '\0'; - if (wi::fits_shwi_p (aref.offrng[0])) - { - if (aref.offrng[0] == aref.offrng[1] - || !wi::fits_shwi_p (aref.offrng[1])) - sprintf (offstr, " %lli", - (long long)aref.offrng[0].to_shwi ()); - else - sprintf (offstr, " [%lli, %lli]", - (long long)aref.offrng[0].to_shwi (), - (long long)aref.offrng[1].to_shwi ()); - } - - if (!warning_at (loc, OPT_Wfree_nonheap_object, - "%K%qD called on pointer %qE with nonzero offset%s", - exp, dealloc_decl, aref.ref, offstr)) - return false; - - if (DECL_P (aref.ref)) - inform (DECL_SOURCE_LOCATION (aref.ref), "declared here"); - else if (TREE_CODE (aref.ref) == SSA_NAME) - { - gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref); - if (is_gimple_call (def_stmt)) - { - location_t def_loc = gimple_location (def_stmt); - tree alloc_decl = gimple_call_fndecl (def_stmt); - if (alloc_decl) - inform (def_loc, - "returned from %qD", alloc_decl); - else if (tree alloc_fntype = gimple_call_fntype (def_stmt)) - inform (def_loc, - "returned from %qT", alloc_fntype); - else - inform (def_loc, "obtained here"); - } - } - - return true; -} - -/* Issue a warning if a deallocation function such as free, realloc, - or C++ operator delete is called with an argument not returned by - a matching allocation function such as malloc or the corresponding - form of C++ operatorn new. */ - -void -maybe_emit_free_warning (tree exp) -{ - tree fndecl = get_callee_fndecl (exp); - if (!fndecl) - return; - - unsigned argno = call_dealloc_argno (exp); - if ((unsigned) call_expr_nargs (exp) <= argno) - return; - - tree ptr = CALL_EXPR_ARG (exp, argno); - if (integer_zerop (ptr)) - return; - - access_ref aref; - if (!compute_objsize (ptr, 0, &aref)) - return; - - tree ref = aref.ref; - if (integer_zerop (ref)) - return; - - tree dealloc_decl = get_callee_fndecl (exp); - location_t loc = tree_inlined_location (exp); - - if (DECL_P (ref) || EXPR_P (ref)) - { - /* Diagnose freeing a declared object. */ - if (aref.ref_declared () - && warning_at (loc, OPT_Wfree_nonheap_object, - "%K%qD called on unallocated object %qD", - exp, dealloc_decl, ref)) - { - loc = (DECL_P (ref) - ? DECL_SOURCE_LOCATION (ref) - : EXPR_LOCATION (ref)); - inform (loc, "declared here"); - return; - } - - /* Diagnose freeing a pointer that includes a positive offset. - Such a pointer cannot refer to the beginning of an allocated - object. A negative offset may refer to it. */ - if (aref.sizrng[0] != aref.sizrng[1] - && warn_dealloc_offset (loc, exp, aref)) - return; - } - else if (CONSTANT_CLASS_P (ref)) - { - if (warning_at (loc, OPT_Wfree_nonheap_object, - "%K%qD called on a pointer to an unallocated " - "object %qE", exp, dealloc_decl, ref)) - { - if (TREE_CODE (ptr) == SSA_NAME) - { - gimple *def_stmt = SSA_NAME_DEF_STMT (ptr); - if (is_gimple_assign (def_stmt)) - { - location_t loc = gimple_location (def_stmt); - inform (loc, "assigned here"); - } - } - return; - } - } - else if (TREE_CODE (ref) == SSA_NAME) - { - /* Also warn if the pointer argument refers to the result - of an allocation call like alloca or VLA. 
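The first branch of maybe_emit_free_warning above handles deallocating a declared object and follows the warning with a "declared here" note. For example, with an invented global buffer and -Wfree-nonheap-object in effect:

#include <cstdlib>

char buf[32];

void
not_heap ()
{
  std::free (buf);   /* expected warning: 'buf' is not heap-allocated;
                        the note points at its declaration  */
}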
*/ - gimple *def_stmt = SSA_NAME_DEF_STMT (ref); - if (is_gimple_call (def_stmt)) - { - bool warned = false; - if (gimple_call_alloc_p (def_stmt)) - { - if (matching_alloc_calls_p (def_stmt, dealloc_decl)) - { - if (warn_dealloc_offset (loc, exp, aref)) - return; - } - else - { - tree alloc_decl = gimple_call_fndecl (def_stmt); - int opt = (DECL_IS_OPERATOR_NEW_P (alloc_decl) - || DECL_IS_OPERATOR_DELETE_P (dealloc_decl) - ? OPT_Wmismatched_new_delete - : OPT_Wmismatched_dealloc); - warned = warning_at (loc, opt, - "%K%qD called on pointer returned " - "from a mismatched allocation " - "function", exp, dealloc_decl); - } - } - else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA) - || gimple_call_builtin_p (def_stmt, - BUILT_IN_ALLOCA_WITH_ALIGN)) - warned = warning_at (loc, OPT_Wfree_nonheap_object, - "%K%qD called on pointer to " - "an unallocated object", - exp, dealloc_decl); - else if (warn_dealloc_offset (loc, exp, aref)) - return; - - if (warned) - { - tree fndecl = gimple_call_fndecl (def_stmt); - inform (gimple_location (def_stmt), - "returned from %qD", fndecl); - return; - } - } - else if (gimple_nop_p (def_stmt)) - { - ref = SSA_NAME_VAR (ref); - /* Diagnose freeing a pointer that includes a positive offset. */ - if (TREE_CODE (ref) == PARM_DECL - && !aref.deref - && aref.sizrng[0] != aref.sizrng[1] - && aref.offrng[0] > 0 && aref.offrng[1] > 0 - && warn_dealloc_offset (loc, exp, aref)) - return; - } - } -} - /* Fold a call to __builtin_object_size with arguments PTR and OST, if possible. */ @@ -13753,7 +10322,7 @@ fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs) { ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret); SET_EXPR_LOCATION (ret, loc); - TREE_NO_WARNING (ret) = 1; + suppress_warning (ret); return ret; } return NULL_TREE; @@ -14286,8 +10855,8 @@ target_char_cst_p (tree t, char *p) } /* Return true if the builtin DECL is implemented in a standard library. - Otherwise returns false which doesn't guarantee it is not (thus the list of - handled builtins below may be incomplete). */ + Otherwise return false which doesn't guarantee it is not (thus the list + of handled builtins below may be incomplete). */ bool builtin_with_linkage_p (tree decl) @@ -14366,6 +10935,14 @@ builtin_with_linkage_p (tree decl) CASE_FLT_FN (BUILT_IN_TRUNC): CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC): return true; + + case BUILT_IN_STPCPY: + case BUILT_IN_STPNCPY: + /* stpcpy is both referenced in libiberty's pex-win32.c and provided + by libiberty's stpcpy.c for MinGW targets so we need to return true + in order to be able to build libiberty in LTO mode for them. */ + return true; + default: break; } |
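The SSA_NAME branch above also covers pointers produced by alloca or a VLA, whose storage is released automatically on function exit. A sketch of that misuse, again expected to draw -Wfree-nonheap-object:

#include <cstdlib>

void
from_alloca (unsigned n)
{
  void *p = __builtin_alloca (n);
  std::free (p);   /* expected warning: alloca memory must not be
                      passed to free  */
}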