author     Trevor Saunders <tbsaunde+gcc@tbsaunde.org>   2015-07-09 02:49:51 +0000
committer  Trevor Saunders <tbsaunde@gcc.gnu.org>        2015-07-09 02:49:51 +0000
commit     f1657f05ef0fbc5183051de277141159f15a9472
tree       e4dc3a562a1e0900ee3ab1384aba87e00500bcef /gcc/postreload.c
parent     1acfc9ca30d79133bca85399440c0d7fa123fb51
reduce conditional compilation for LOAD_EXTEND_OP
Provide a default definition of LOAD_EXTEND_OP in the files where that
is possible, so that everything else in those files can be compiled
unconditionally. However, rtlanal.c and reload.c do tricky things that
prevent providing a global default, so we can't do that yet.
gcc/ChangeLog:
2015-07-08 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* combine.c (try_combine): Don't check if LOAD_EXTEND_OP is
defined.
(simplify_set): Likewise.
* cse.c (cse_insn): Likewise.
* fold-const.c (fold_single_bit_test): Likewise.
(fold_unary_loc): Likewise.
* postreload.c (reload_cse_simplify_set): Likewise.
(reload_cse_simplify_operands): Likewise.
From-SVN: r225591
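
The diff below shows only the postreload.c half of the pattern, so here is a
minimal standalone sketch of the idea, assuming nothing from the GCC tree: the
enum, the integer stand-in for machine_mode, and the loads_extend_p helper are
invented for illustration; only the #ifndef/#define fallback mirrors what the
commit adds to postreload.c.

/* Standalone sketch (not GCC source): rtx_code, UNKNOWN and the helper
   below are simplified stand-ins used only to show the pattern.  */
#include <stdio.h>

enum rtx_code { UNKNOWN, SIGN_EXTEND, ZERO_EXTEND };

/* A target header may define LOAD_EXTEND_OP to say how sub-word memory
   loads are extended.  Supplying this fallback lets every use of the
   macro compile unconditionally; on targets without extending loads the
   test below becomes constant-false and the branch is folded away.  */
#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(MODE) UNKNOWN
#endif

/* Before the change, a test like this had to sit inside
   #ifdef LOAD_EXTEND_OP ... #endif.  */
static int
loads_extend_p (int mode)
{
  return LOAD_EXTEND_OP (mode) != UNKNOWN;
}

int
main (void)
{
  printf ("sub-word loads extend: %s\n", loads_extend_p (0) ? "yes" : "no");
  return 0;
}

With the fallback in place the #ifdef guard around each use of LOAD_EXTEND_OP
can be deleted; targets that never define the macro see a compile-time-constant
condition, so the generated code is unchanged.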
Diffstat (limited to 'gcc/postreload.c')
-rw-r--r--  gcc/postreload.c | 18 ++++++------------
1 file changed, 6 insertions(+), 12 deletions(-)
diff --git a/gcc/postreload.c b/gcc/postreload.c
index 3db2c07..03babc8 100644
--- a/gcc/postreload.c
+++ b/gcc/postreload.c
@@ -54,6 +54,10 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-pass.h"
 #include "dbgcnt.h"
 
+#ifndef LOAD_EXTEND_OP
+#define LOAD_EXTEND_OP(M) UNKNOWN
+#endif
+
 static int reload_cse_noop_set_p (rtx);
 static bool reload_cse_simplify (rtx_insn *, rtx);
 static void reload_cse_regs_1 (void);
@@ -254,9 +258,7 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
   int old_cost;
   cselib_val *val;
   struct elt_loc_list *l;
-#ifdef LOAD_EXTEND_OP
   enum rtx_code extend_op = UNKNOWN;
-#endif
   bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
 
   dreg = true_regnum (SET_DEST (set));
@@ -269,7 +271,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
 
   dclass = REGNO_REG_CLASS (dreg);
 
-#ifdef LOAD_EXTEND_OP
   /* When replacing a memory with a register, we need to honor assumptions
      that combine made wrt the contents of sign bits.  We'll do this by
      generating an extend instruction instead of a reg->reg copy.  Thus
@@ -279,7 +280,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
       && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
       && !REG_P (SET_DEST (set)))
     return 0;
-#endif
 
   val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
   if (! val)
@@ -301,7 +301,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
 
       if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
         {
-#ifdef LOAD_EXTEND_OP
           if (extend_op != UNKNOWN)
             {
               wide_int result;
@@ -326,19 +325,17 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
                 }
               this_rtx = immed_wide_int_const (result, word_mode);
             }
-#endif
+
           this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
         }
       else if (REG_P (this_rtx))
         {
-#ifdef LOAD_EXTEND_OP
           if (extend_op != UNKNOWN)
             {
               this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
               this_cost = set_src_cost (this_rtx, word_mode, speed);
             }
           else
-#endif
             this_cost = register_move_cost (GET_MODE (this_rtx),
                                             REGNO_REG_CLASS (REGNO (this_rtx)),
                                             dclass);
@@ -353,7 +350,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
              && REG_P (this_rtx)
              && !REG_P (SET_SRC (set))))
        {
-#ifdef LOAD_EXTEND_OP
          if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
              && extend_op != UNKNOWN
 #ifdef CANNOT_CHANGE_MODE_CLASS
@@ -367,7 +363,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
              ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
              validate_change (insn, &SET_DEST (set), wide_dest, 1);
            }
-#endif
 
          validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
          old_cost = this_cost, did_change = 1;
@@ -439,7 +434,6 @@ reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
        continue;
 
       op = recog_data.operand[i];
-#ifdef LOAD_EXTEND_OP
       if (MEM_P (op)
          && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
          && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
@@ -490,7 +484,7 @@ reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
             safe to optimize, but is it worth the trouble?  */
          continue;
        }
-#endif /* LOAD_EXTEND_OP */
+
       if (side_effects_p (op))
        continue;
       v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);