diff options
author | Richard Biener <rguenther@suse.de> | 2021-04-14 13:40:58 +0200 |
---|---|---|
committer | Richard Biener <rguenther@suse.de> | 2021-04-26 10:08:46 +0200 |
commit | 52a5515ed6619739eb122f05ce26057dd8b06fb6 (patch) | |
tree | ce23cf5cca7daae1d47e61007eccfdbfe862fe13 /gcc/gimple-fold.c | |
parent | 297bfacdb448c0d29b8dfac2818350b90902bc75 (diff) | |
download | gcc-52a5515ed6619739eb122f05ce26057dd8b06fb6.zip gcc-52a5515ed6619739eb122f05ce26057dd8b06fb6.tar.gz gcc-52a5515ed6619739eb122f05ce26057dd8b06fb6.tar.bz2 |
Simplify {gimplify_and_,}update_call_from_tree API
This removes update_call_from_tree in favor of
gimplify_and_update_call_from_tree, removing some code duplication
and simplifying the API use. Some users of update_call_from_tree
have been transitioned to replace_call_with_value and the API
and its dependences have been moved to gimple-fold.h.
This shaves off another user of valid_gimple_rhs_p which is now
only used from within gimple-fold.c and thus moved and made private.
2021-04-14 Richard Biener <rguenther@suse.de>
* tree-ssa-propagate.h (valid_gimple_rhs_p): Remove.
(update_gimple_call): Likewise.
(update_call_from_tree): Likewise.
* tree-ssa-propagate.c (valid_gimple_rhs_p): Remove.
(valid_gimple_call_p): Likewise.
(move_ssa_defining_stmt_for_defs): Likewise.
(finish_update_gimple_call): Likewise.
(update_gimple_call): Likewise.
(update_call_from_tree): Likewise.
(propagate_tree_value_into_stmt): Use replace_call_with_value.
* gimple-fold.h (update_gimple_call): Declare.
* gimple-fold.c (valid_gimple_rhs_p): Move here from
tree-ssa-propagate.c.
(update_gimple_call): Likewise.
(valid_gimple_call_p): Likewise.
(finish_update_gimple_call): Likewise, and simplify.
(gimplify_and_update_call_from_tree): Implement
update_call_from_tree functionality, avoid excessive
push/pop_gimplify_context.
(gimple_fold_builtin): Use only gimplify_and_update_call_from_tree.
(gimple_fold_call): Likewise.
* gimple-ssa-sprintf.c (try_substitute_return_value): Likewise.
* tree-ssa-ccp.c (ccp_folder::fold_stmt): Likewise.
(pass_fold_builtins::execute): Likewise.
(optimize_stack_restore): Use replace_call_with_value.
* tree-cfg.c (fold_loop_internal_call): Likewise.
* tree-ssa-dce.c (maybe_optimize_arith_overflow): Use
only gimplify_and_update_call_from_tree.
* tree-ssa-strlen.c (handle_builtin_strlen): Likewise.
(handle_builtin_strchr): Likewise.
* tsan.c: Include gimple-fold.h instead of tree-ssa-propagate.h.
* config/rs6000/rs6000-call.c (rs6000_gimple_fold_builtin):
Use replace_call_with_value.
Diffstat (limited to 'gcc/gimple-fold.c')
-rw-r--r-- | gcc/gimple-fold.c | 219 |
1 file changed, 211 insertions(+), 8 deletions(-)
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c index 7602018..aa33779b 100644 --- a/gcc/gimple-fold.c +++ b/gcc/gimple-fold.c @@ -335,6 +335,123 @@ maybe_fold_reference (tree expr) return NULL_TREE; } +/* Return true if EXPR is an acceptable right-hand-side for a + GIMPLE assignment. We validate the entire tree, not just + the root node, thus catching expressions that embed complex + operands that are not permitted in GIMPLE. This function + is needed because the folding routines in fold-const.c + may return such expressions in some cases, e.g., an array + access with an embedded index addition. It may make more + sense to have folding routines that are sensitive to the + constraints on GIMPLE operands, rather than abandoning any + any attempt to fold if the usual folding turns out to be too + aggressive. */ + +bool +valid_gimple_rhs_p (tree expr) +{ + enum tree_code code = TREE_CODE (expr); + + switch (TREE_CODE_CLASS (code)) + { + case tcc_declaration: + if (!is_gimple_variable (expr)) + return false; + break; + + case tcc_constant: + /* All constants are ok. */ + break; + + case tcc_comparison: + /* GENERIC allows comparisons with non-boolean types, reject + those for GIMPLE. Let vector-typed comparisons pass - rules + for GENERIC and GIMPLE are the same here. */ + if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr)) + && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE + || TYPE_PRECISION (TREE_TYPE (expr)) == 1)) + && ! VECTOR_TYPE_P (TREE_TYPE (expr))) + return false; + + /* Fallthru. */ + case tcc_binary: + if (!is_gimple_val (TREE_OPERAND (expr, 0)) + || !is_gimple_val (TREE_OPERAND (expr, 1))) + return false; + break; + + case tcc_unary: + if (!is_gimple_val (TREE_OPERAND (expr, 0))) + return false; + break; + + case tcc_expression: + switch (code) + { + case ADDR_EXPR: + { + tree t; + if (is_gimple_min_invariant (expr)) + return true; + t = TREE_OPERAND (expr, 0); + while (handled_component_p (t)) + { + /* ??? More checks needed, see the GIMPLE verifier. 
*/ + if ((TREE_CODE (t) == ARRAY_REF + || TREE_CODE (t) == ARRAY_RANGE_REF) + && !is_gimple_val (TREE_OPERAND (t, 1))) + return false; + t = TREE_OPERAND (t, 0); + } + if (!is_gimple_id (t)) + return false; + } + break; + + default: + if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS) + { + if ((code == COND_EXPR + ? !is_gimple_condexpr (TREE_OPERAND (expr, 0)) + : !is_gimple_val (TREE_OPERAND (expr, 0))) + || !is_gimple_val (TREE_OPERAND (expr, 1)) + || !is_gimple_val (TREE_OPERAND (expr, 2))) + return false; + break; + } + return false; + } + break; + + case tcc_vl_exp: + return false; + + case tcc_exceptional: + if (code == CONSTRUCTOR) + { + unsigned i; + tree elt; + FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt) + if (!is_gimple_val (elt)) + return false; + return true; + } + if (code != SSA_NAME) + return false; + break; + + case tcc_reference: + if (code == BIT_FIELD_REF) + return is_gimple_val (TREE_OPERAND (expr, 0)); + return false; + + default: + return false; + } + + return true; +} + /* Attempt to fold an assignment statement pointed-to by SI. Returns a replacement rhs for the statement or NULL_TREE if no simplification @@ -534,6 +651,72 @@ gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts) gsi_replace_with_seq (si_p, stmts, false); } +/* Helper function for update_gimple_call and + gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced + with GIMPLE_CALL NEW_STMT. 
*/ + +static void +finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt, + gimple *stmt) +{ + tree lhs = gimple_call_lhs (stmt); + gimple_call_set_lhs (new_stmt, lhs); + if (lhs && TREE_CODE (lhs) == SSA_NAME) + SSA_NAME_DEF_STMT (lhs) = new_stmt; + gimple_move_vops (new_stmt, stmt); + gimple_set_location (new_stmt, gimple_location (stmt)); + if (gimple_block (new_stmt) == NULL_TREE) + gimple_set_block (new_stmt, gimple_block (stmt)); + gsi_replace (si_p, new_stmt, false); +} + +/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN + with number of arguments NARGS, where the arguments in GIMPLE form + follow NARGS argument. */ + +bool +update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...) +{ + va_list ap; + gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p)); + + gcc_assert (is_gimple_call (stmt)); + va_start (ap, nargs); + new_stmt = gimple_build_call_valist (fn, nargs, ap); + finish_update_gimple_call (si_p, new_stmt, stmt); + va_end (ap); + return true; +} + +/* Return true if EXPR is a CALL_EXPR suitable for representation + as a single GIMPLE_CALL statement. If the arguments require + further gimplification, return false. */ + +static bool +valid_gimple_call_p (tree expr) +{ + unsigned i, nargs; + + if (TREE_CODE (expr) != CALL_EXPR) + return false; + + nargs = call_expr_nargs (expr); + for (i = 0; i < nargs; i++) + { + tree arg = CALL_EXPR_ARG (expr, i); + if (is_gimple_reg_type (TREE_TYPE (arg))) + { + if (!is_gimple_val (arg)) + return false; + } + else + if (!is_gimple_lvalue (arg)) + return false; + } + + return true; +} + /* Convert EXPR into a GIMPLE value suitable for substitution on the RHS of an assignment. Insert the necessary statements before iterator *SI_P. 
The statement at *SI_P, which must be a GIMPLE_CALL @@ -556,17 +739,41 @@ gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) gcc_assert (is_gimple_call (stmt)); - push_gimplify_context (gimple_in_ssa_p (cfun)); + if (valid_gimple_call_p (expr)) + { + /* The call has simplified to another call. */ + tree fn = CALL_EXPR_FN (expr); + unsigned i; + unsigned nargs = call_expr_nargs (expr); + vec<tree> args = vNULL; + gcall *new_stmt; + + if (nargs > 0) + { + args.create (nargs); + args.safe_grow_cleared (nargs, true); + + for (i = 0; i < nargs; i++) + args[i] = CALL_EXPR_ARG (expr, i); + } + + new_stmt = gimple_build_call_vec (fn, args); + finish_update_gimple_call (si_p, new_stmt, stmt); + args.release (); + return; + } lhs = gimple_call_lhs (stmt); if (lhs == NULL_TREE) { + push_gimplify_context (gimple_in_ssa_p (cfun)); gimplify_and_add (expr, &stmts); + pop_gimplify_context (NULL); + /* We can end up with folding a memcpy of an empty class assignment which gets optimized away by C++ gimplification. 
*/ if (gimple_seq_empty_p (stmts)) { - pop_gimplify_context (NULL); if (gimple_in_ssa_p (cfun)) { unlink_stmt_vdef (stmt); @@ -585,8 +792,6 @@ gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) GSI_CONTINUE_LINKING); } - pop_gimplify_context (NULL); - gsi_replace_with_seq_vops (si_p, stmts); } @@ -4955,8 +5160,7 @@ gimple_fold_builtin (gimple_stmt_iterator *gsi) STRIP_NOPS (result); else result = fold_convert (gimple_call_return_type (stmt), result); - if (!update_call_from_tree (gsi, result)) - gimplify_and_update_call_from_tree (gsi, result); + gimplify_and_update_call_from_tree (gsi, result); return true; } @@ -5552,8 +5756,7 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace) result = build2_loc (gimple_location (stmt), COMPLEX_EXPR, ctype, result, overflow); } - if (!update_call_from_tree (gsi, result)) - gimplify_and_update_call_from_tree (gsi, result); + gimplify_and_update_call_from_tree (gsi, result); changed = true; } } |