diff options
author | Uros Bizjak <ubizjak@gmail.com> | 2023-06-29 17:29:03 +0200 |
---|---|---|
committer | Uros Bizjak <ubizjak@gmail.com> | 2023-06-29 17:29:47 +0200 |
commit | c41332abe7da981abdc23440e7e859b92ba065ec (patch) | |
tree | 506f6ba2316c1c57f609d11a323f74608e9deaf3 /gcc/expr.cc | |
parent | 5dfdf0ae4dca44a4f572c346d322fd6244598190 (diff) | |
download | gcc-c41332abe7da981abdc23440e7e859b92ba065ec.zip gcc-c41332abe7da981abdc23440e7e859b92ba065ec.tar.gz gcc-c41332abe7da981abdc23440e7e859b92ba065ec.tar.bz2 |
cselib+expr+bitmap: Change return type of predicate functions from int to bool
gcc/ChangeLog:
* cselib.h (rtx_equal_for_cselib_1):
Change return type from int to bool.
(references_value_p): Ditto.
(rtx_equal_for_cselib_p): Ditto.
* expr.h (can_store_by_pieces): Ditto.
(try_casesi): Ditto.
(try_tablejump): Ditto.
(safe_from_p): Ditto.
* sbitmap.h (bitmap_equal_p): Ditto.
* cselib.cc (references_value_p): Change return type
from int to bool and adjust function body accordingly.
(rtx_equal_for_cselib_1): Ditto.
* expr.cc (is_aligning_offset): Ditto.
(can_store_by_pieces): Ditto.
(mostly_zeros_p): Ditto.
(all_zeros_p): Ditto.
(safe_from_p): Ditto.
(is_aligning_offset): Ditto.
(try_casesi): Ditto.
(try_tablejump): Ditto.
(store_constructor): Change "need_to_clear" and
"const_bounds_p" variables to bool.
* sbitmap.cc (bitmap_equal_p): Change return type from int to bool.
Diffstat (limited to 'gcc/expr.cc')
-rw-r--r-- | gcc/expr.cc | 104 |
1 files changed, 52 insertions, 52 deletions
diff --git a/gcc/expr.cc b/gcc/expr.cc index b7f4e2f..fff09dc 100644 --- a/gcc/expr.cc +++ b/gcc/expr.cc @@ -89,7 +89,7 @@ static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64, static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree); -static int is_aligning_offset (const_tree, const_tree); +static bool is_aligning_offset (const_tree, const_tree); static rtx reduce_to_bit_field_precision (rtx, rtx, tree); static rtx do_store_flag (sepops, rtx, machine_mode); #ifdef PUSH_ROUNDING @@ -1691,9 +1691,9 @@ store_by_pieces_d::finish_retmode (memop_ret retmode) a pointer which will be passed as argument in every CONSTFUN call. ALIGN is maximum alignment we can assume. MEMSETP is true if this is a memset operation and false if it's a copy of a constant string. - Return nonzero if a call to store_by_pieces should succeed. */ + Return true if a call to store_by_pieces should succeed. */ -int +bool can_store_by_pieces (unsigned HOST_WIDE_INT len, by_pieces_constfn constfun, void *constfundata, unsigned int align, bool memsetp) @@ -1707,14 +1707,14 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len, rtx cst ATTRIBUTE_UNUSED; if (len == 0) - return 1; + return true; if (!targetm.use_by_pieces_infrastructure_p (len, align, memsetp ? SET_BY_PIECES : STORE_BY_PIECES, optimize_insn_for_speed_p ())) - return 0; + return false; align = alignment_for_piecewise_move (STORE_MAX_PIECES, align); @@ -1749,7 +1749,7 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len, vector mode for the memset expander. 
*/ if (!((memsetp && VECTOR_MODE_P (mode)) || targetm.legitimate_constant_p (mode, cst))) - return 0; + return false; if (!reverse) offset += size; @@ -1765,7 +1765,7 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len, gcc_assert (!l); } - return 1; + return true; } /* Generate several move instructions to store LEN bytes generated by @@ -6868,9 +6868,9 @@ complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts, return count_type_elements (type, true) == num_elts; } -/* Return 1 if EXP contains mostly (3/4) zeros. */ +/* Return true if EXP contains mostly (3/4) zeros. */ -static int +static bool mostly_zeros_p (const_tree exp) { if (TREE_CODE (exp) == CONSTRUCTOR) @@ -6886,9 +6886,9 @@ mostly_zeros_p (const_tree exp) return initializer_zerop (exp); } -/* Return 1 if EXP contains all zeros. */ +/* Return true if EXP contains all zeros. */ -static int +static bool all_zeros_p (const_tree exp) { if (TREE_CODE (exp) == CONSTRUCTOR) @@ -7146,10 +7146,10 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size, { tree value, index; unsigned HOST_WIDE_INT i; - int need_to_clear; + bool need_to_clear; tree domain; tree elttype = TREE_TYPE (type); - int const_bounds_p; + bool const_bounds_p; HOST_WIDE_INT minelt = 0; HOST_WIDE_INT maxelt = 0; @@ -7173,9 +7173,9 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size, the whole array first. Similarly if this is static constructor of a non-BLKmode object. */ if (cleared) - need_to_clear = 0; + need_to_clear = false; else if (REG_P (target) && TREE_STATIC (exp)) - need_to_clear = 1; + need_to_clear = true; else { unsigned HOST_WIDE_INT idx; @@ -7200,7 +7200,7 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size, if (! tree_fits_uhwi_p (lo_index) || ! tree_fits_uhwi_p (hi_index)) { - need_to_clear = 1; + need_to_clear = true; break; } @@ -7221,7 +7221,7 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size, if (! 
need_to_clear && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count)) - need_to_clear = 1; + need_to_clear = true; } if (need_to_clear && maybe_gt (size, 0)) @@ -7413,7 +7413,7 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size, unsigned HOST_WIDE_INT idx; constructor_elt *ce; int i; - int need_to_clear; + bool need_to_clear; insn_code icode = CODE_FOR_nothing; tree elt; tree elttype = TREE_TYPE (type); @@ -7511,9 +7511,9 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size, clear the whole array first. Similarly if this is static constructor of a non-BLKmode object. */ if (cleared) - need_to_clear = 0; + need_to_clear = false; else if (REG_P (target) && TREE_STATIC (exp)) - need_to_clear = 1; + need_to_clear = true; else { unsigned HOST_WIDE_INT count = 0, zero_count = 0; @@ -8283,15 +8283,15 @@ force_operand (rtx value, rtx target) return value; } -/* Subroutine of expand_expr: return nonzero iff there is no way that +/* Subroutine of expand_expr: return true iff there is no way that EXP can reference X, which is being modified. TOP_P is nonzero if this call is going to be used to determine whether we need a temporary for EXP, as opposed to a recursive call to this function. - It is always safe for this routine to return zero since it merely + It is always safe for this routine to return false since it merely searches for optimization opportunities. */ -int +bool safe_from_p (const_rtx x, tree exp, int top_p) { rtx exp_rtl = 0; @@ -8316,7 +8316,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) && (XEXP (x, 0) == virtual_outgoing_args_rtx || (GET_CODE (XEXP (x, 0)) == PLUS && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))) - return 1; + return true; /* If this is a subreg of a hard register, declare it unsafe, otherwise, find the underlying pseudo. 
*/ @@ -8324,7 +8324,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) { x = SUBREG_REG (x); if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) - return 0; + return false; } /* Now look at our tree code and possibly recurse. */ @@ -8335,7 +8335,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) break; case tcc_constant: - return 1; + return true; case tcc_exceptional: if (TREE_CODE (exp) == TREE_LIST) @@ -8343,10 +8343,10 @@ safe_from_p (const_rtx x, tree exp, int top_p) while (1) { if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0)) - return 0; + return false; exp = TREE_CHAIN (exp); if (!exp) - return 1; + return true; if (TREE_CODE (exp) != TREE_LIST) return safe_from_p (x, exp, 0); } @@ -8359,13 +8359,13 @@ safe_from_p (const_rtx x, tree exp, int top_p) FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce) if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0)) || !safe_from_p (x, ce->value, 0)) - return 0; - return 1; + return false; + return true; } else if (TREE_CODE (exp) == ERROR_MARK) - return 1; /* An already-visited SAVE_EXPR? */ + return true; /* An already-visited SAVE_EXPR? */ else - return 0; + return false; case tcc_statement: /* The only case we look at here is the DECL_INITIAL inside a @@ -8378,7 +8378,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) case tcc_binary: case tcc_comparison: if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0)) - return 0; + return false; /* Fall through. 
*/ case tcc_unary: @@ -8400,7 +8400,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) if (staticp (TREE_OPERAND (exp, 0)) || TREE_STATIC (exp) || safe_from_p (x, TREE_OPERAND (exp, 0), 0)) - return 1; + return true; /* Otherwise, the only way this can conflict is if we are taking the address of a DECL a that address if part of X, which is @@ -8410,7 +8410,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) { if (!DECL_RTL_SET_P (exp) || !MEM_P (DECL_RTL (exp))) - return 0; + return false; else exp_rtl = XEXP (DECL_RTL (exp), 0); } @@ -8420,7 +8420,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) if (MEM_P (x) && alias_sets_conflict_p (MEM_ALIAS_SET (x), get_alias_set (exp))) - return 0; + return false; break; case CALL_EXPR: @@ -8428,7 +8428,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) all of memory. */ if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) || MEM_P (x)) - return 0; + return false; break; case WITH_CLEANUP_EXPR: @@ -8451,7 +8451,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) for (i = 0; i < nops; i++) if (TREE_OPERAND (exp, i) != 0 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0)) - return 0; + return false; break; @@ -8469,7 +8469,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) exp_rtl = SUBREG_REG (exp_rtl); if (REG_P (exp_rtl) && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER) - return 0; + return false; } /* If the rtl is X, then it is not safe. Otherwise, it is unless both @@ -8480,7 +8480,7 @@ safe_from_p (const_rtx x, tree exp, int top_p) } /* If we reach here, it is safe. */ - return 1; + return true; } @@ -12195,11 +12195,11 @@ reduce_to_bit_field_precision (rtx exp, rtx target, tree type) } } -/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that +/* Subroutine of above: returns true if OFFSET corresponds to an offset that when applied to the address of EXP produces an address known to be aligned more than BIGGEST_ALIGNMENT. 
*/ -static int +static bool is_aligning_offset (const_tree offset, const_tree exp) { /* Strip off any conversions. */ @@ -12213,7 +12213,7 @@ is_aligning_offset (const_tree offset, const_tree exp) || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0 || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1)) - return 0; + return false; /* Look at the first operand of BIT_AND_EXPR and strip any conversion. It must be NEGATE_EXPR. Then strip any more conversions. */ @@ -12222,7 +12222,7 @@ is_aligning_offset (const_tree offset, const_tree exp) offset = TREE_OPERAND (offset, 0); if (TREE_CODE (offset) != NEGATE_EXPR) - return 0; + return false; offset = TREE_OPERAND (offset, 0); while (CONVERT_EXPR_P (offset)) @@ -13220,12 +13220,12 @@ do_store_flag (sepops ops, rtx target, machine_mode mode) && !TYPE_UNSIGNED (ops->type)) ? -1 : 1); } -/* Attempt to generate a casesi instruction. Returns 1 if successful, - 0 otherwise (i.e. if there is no casesi instruction). +/* Attempt to generate a casesi instruction. Returns true if successful, + false otherwise (i.e. if there is no casesi instruction). DEFAULT_PROBABILITY is the probability of jumping to the default label. */ -int +bool try_casesi (tree index_type, tree index_expr, tree minval, tree range, rtx table_label, rtx default_label, rtx fallback_label, profile_probability default_probability) @@ -13235,7 +13235,7 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range, rtx op1, op2, index; if (! targetm.have_casesi ()) - return 0; + return false; /* The index must be some form of integer. Convert it to SImode. */ scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type); @@ -13279,7 +13279,7 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range, ? default_label : fallback_label)); expand_jump_insn (targetm.code_for_casesi, 5, ops); - return 1; + return true; } /* Attempt to generate a tablejump instruction; same concept. 
*/ @@ -13374,7 +13374,7 @@ do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label, emit_barrier (); } -int +bool try_tablejump (tree index_type, tree index_expr, tree minval, tree range, rtx table_label, rtx default_label, profile_probability default_probability) @@ -13382,7 +13382,7 @@ try_tablejump (tree index_type, tree index_expr, tree minval, tree range, rtx index; if (! targetm.have_tablejump ()) - return 0; + return false; index_expr = fold_build2 (MINUS_EXPR, index_type, fold_convert (index_type, index_expr), @@ -13396,7 +13396,7 @@ try_tablejump (tree index_type, tree index_expr, tree minval, tree range, expand_normal (range), TYPE_UNSIGNED (TREE_TYPE (range))), table_label, default_label, default_probability); - return 1; + return true; } /* Return a CONST_VECTOR rtx representing vector mask for |