Diffstat (limited to 'gcc/combine.c')
-rw-r--r-- | gcc/combine.c | 1013
1 files changed, 101 insertions, 912 deletions
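Despite the large deletion count, the functional change is an indirection swap: combine.c stops carrying private copies of nonzero_bits and num_sign_bit_copies (and stops overriding the bare gen_lowpart pointer) and instead installs a whole rtl_hooks vector for the duration of the pass, so the now-shared analysis code can call back into combine for register-specific knowledge. The toy program below sketches that install-and-restore hook pattern; apart from the GCC names quoted in the comments (rtl_hooks, general_rtl_hooks, combine_rtl_hooks, RTL_HOOKS_INITIALIZER), every type and name in it is a simplified stand-in, not GCC's real declarations.

/* Toy model of the rtl_hooks swap introduced by this patch.  GCC's real
   types (rtx, machine modes) are replaced by plain ints so the sketch is
   self-contained; only the shape of the mechanism mirrors combine.c.  */
#include <stdio.h>

struct hooks
{
  int (*gen_lowpart) (int);
  int (*reg_nonzero_bits) (int);
};

/* Default callbacks, playing the role of general_rtl_hooks.  */
static int default_lowpart (int x) { return x; }
static int default_nonzero_bits (int x) { (void) x; return -1; } /* all bits may be set */
static const struct hooks general_hooks = { default_lowpart, default_nonzero_bits };

/* Pass-specific callbacks, playing the role of gen_lowpart_for_combine
   and reg_nonzero_bits_for_combine, built the way combine.c builds
   combine_rtl_hooks from RTL_HOOKS_INITIALIZER after redefining the
   RTL_HOOKS_* macros it wants to override.  */
static int combine_lowpart (int x) { return x & 0xffff; }
static int combine_nonzero_bits (int x) { (void) x; return 0xff; } /* pretend we tracked this */
static const struct hooks combine_hooks = { combine_lowpart, combine_nonzero_bits };

/* The live hook vector, playing the role of the global rtl_hooks that the
   shared nonzero_bits/num_sign_bit_copies code consults.  */
static struct hooks hooks;

static int
shared_nonzero_bits (int x)
{
  /* Shared analysis code always goes through the hook, so it transparently
     picks up whatever the current pass installed.  */
  return hooks.reg_nonzero_bits (x);
}

static void
run_combine_like_pass (void)
{
  hooks = combine_hooks;            /* rtl_hooks = combine_rtl_hooks;  */
  printf ("%#x\n", (unsigned) shared_nonzero_bits (42));
  hooks = general_hooks;            /* rtl_hooks = general_rtl_hooks;  */
}

int
main (void)
{
  hooks = general_hooks;
  run_combine_like_pass ();
  return 0;
}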
diff --git a/gcc/combine.c b/gcc/combine.c index 5a1f89b..64af27c 100644 --- a/gcc/combine.c +++ b/gcc/combine.c @@ -90,6 +90,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "real.h" #include "toplev.h" #include "target.h" +#include "rtlhooks-def.h" /* Number of attempts to combine instructions in this function. */ @@ -133,12 +134,6 @@ static int max_uid_cuid; #define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \ (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1) -#define nonzero_bits(X, M) \ - cached_nonzero_bits (X, M, NULL_RTX, VOIDmode, 0) - -#define num_sign_bit_copies(X, M) \ - cached_num_sign_bit_copies (X, M, NULL_RTX, VOIDmode, 0) - /* Maximum register number, which is the size of the tables below. */ static unsigned int combine_max_regno; @@ -337,6 +332,13 @@ static struct undobuf undobuf; static int n_occurrences; +static rtx reg_nonzero_bits_for_combine (rtx, enum machine_mode, rtx, + enum machine_mode, + unsigned HOST_WIDE_INT, + unsigned HOST_WIDE_INT *); +static rtx reg_num_sign_bit_copies_for_combine (rtx, enum machine_mode, rtx, + enum machine_mode, + unsigned int, unsigned int *); static void do_SUBST (rtx *, rtx); static void do_SUBST_INT (int *, int); static void init_reg_last (void); @@ -372,17 +374,6 @@ static rtx make_field_assignment (rtx); static rtx apply_distributive_law (rtx); static rtx simplify_and_const_int (rtx, enum machine_mode, rtx, unsigned HOST_WIDE_INT); -static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode, - rtx, enum machine_mode, - unsigned HOST_WIDE_INT); -static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx, - enum machine_mode, - unsigned HOST_WIDE_INT); -static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx, - enum machine_mode, - unsigned int); -static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx, - enum machine_mode, unsigned int); static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code, HOST_WIDE_INT, enum machine_mode, int *); static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx, @@ -413,6 +404,21 @@ static enum rtx_code combine_reversed_comparison_code (rtx); static int unmentioned_reg_p_1 (rtx *, void *); static bool unmentioned_reg_p (rtx, rtx); + +/* It is not safe to use ordinary gen_lowpart in combine. + See comments in gen_lowpart_for_combine. */ +#undef RTL_HOOKS_GEN_LOWPART +#define RTL_HOOKS_GEN_LOWPART gen_lowpart_for_combine + +#undef RTL_HOOKS_REG_NONZERO_REG_BITS +#define RTL_HOOKS_REG_NONZERO_REG_BITS reg_nonzero_bits_for_combine + +#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES +#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES reg_num_sign_bit_copies_for_combine + +static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER; + + /* Substitute NEWVAL, an rtx expression, into INTO, a place in some insn. The substitution can be undone by undo_all. If INTO is already set to NEWVAL, do not record this change. Because computing NEWVAL might @@ -522,9 +528,7 @@ combine_instructions (rtx f, unsigned int nregs) combine_max_regno = nregs; - /* It is not safe to use ordinary gen_lowpart in combine. - See comments in gen_lowpart_for_combine. 
*/ - gen_lowpart = gen_lowpart_for_combine; + rtl_hooks = combine_rtl_hooks; reg_stat = xcalloc (nregs, sizeof (struct reg_stat)); @@ -777,7 +781,7 @@ combine_instructions (rtx f, unsigned int nregs) total_successes += combine_successes; nonzero_sign_valid = 0; - gen_lowpart = gen_lowpart_general; + rtl_hooks = general_rtl_hooks; /* Make recognizer allow volatile MEMs again. */ init_recog (); @@ -7997,577 +8001,78 @@ simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop, return x; } -#define nonzero_bits_with_known(X, MODE) \ - cached_nonzero_bits (X, MODE, known_x, known_mode, known_ret) - -/* The function cached_nonzero_bits is a wrapper around nonzero_bits1. - It avoids exponential behavior in nonzero_bits1 when X has - identical subexpressions on the first or the second level. */ - -static unsigned HOST_WIDE_INT -cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x, - enum machine_mode known_mode, - unsigned HOST_WIDE_INT known_ret) -{ - if (x == known_x && mode == known_mode) - return known_ret; - - /* Try to find identical subexpressions. If found call - nonzero_bits1 on X with the subexpressions as KNOWN_X and the - precomputed value for the subexpression as KNOWN_RET. */ - - if (ARITHMETIC_P (x)) - { - rtx x0 = XEXP (x, 0); - rtx x1 = XEXP (x, 1); - - /* Check the first level. */ - if (x0 == x1) - return nonzero_bits1 (x, mode, x0, mode, - nonzero_bits_with_known (x0, mode)); - - /* Check the second level. */ - if (ARITHMETIC_P (x0) - && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) - return nonzero_bits1 (x, mode, x1, mode, - nonzero_bits_with_known (x1, mode)); - - if (ARITHMETIC_P (x1) - && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) - return nonzero_bits1 (x, mode, x0, mode, - nonzero_bits_with_known (x0, mode)); - } - - return nonzero_bits1 (x, mode, known_x, known_mode, known_ret); -} - -/* We let num_sign_bit_copies recur into nonzero_bits as that is useful. - We don't let nonzero_bits recur into num_sign_bit_copies, because that - is less useful. We can't allow both, because that results in exponential - run time recursion. There is a nullstone testcase that triggered - this. This macro avoids accidental uses of num_sign_bit_copies. */ -#define cached_num_sign_bit_copies() - -/* Given an expression, X, compute which bits in X can be nonzero. +/* Given a REG, X, compute which bits in X can be nonzero. We don't care about bits outside of those defined in MODE. For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is a shift, AND, or zero_extract, we can do better. */ -static unsigned HOST_WIDE_INT -nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x, - enum machine_mode known_mode, - unsigned HOST_WIDE_INT known_ret) +static rtx +reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode, + rtx known_x ATTRIBUTE_UNUSED, + enum machine_mode known_mode ATTRIBUTE_UNUSED, + unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED, + unsigned HOST_WIDE_INT *nonzero) { - unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); - unsigned HOST_WIDE_INT inner_nz; - enum rtx_code code; - unsigned int mode_width = GET_MODE_BITSIZE (mode); rtx tem; - /* For floating-point values, assume all bits are needed. */ - if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)) - return nonzero; + /* If X is a register whose nonzero bits value is current, use it. + Otherwise, if X is a register whose value we can find, use that + value. Otherwise, use the previously-computed global nonzero bits + for this register. 
*/ - /* If X is wider than MODE, use its mode instead. */ - if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width) + if (reg_stat[REGNO (x)].last_set_value != 0 + && (reg_stat[REGNO (x)].last_set_mode == mode + || (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT + && GET_MODE_CLASS (mode) == MODE_INT)) + && (reg_stat[REGNO (x)].last_set_label == label_tick + || (REGNO (x) >= FIRST_PSEUDO_REGISTER + && REG_N_SETS (REGNO (x)) == 1 + && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, + REGNO (x)))) + && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid) { - mode = GET_MODE (x); - nonzero = GET_MODE_MASK (mode); - mode_width = GET_MODE_BITSIZE (mode); + *nonzero &= reg_stat[REGNO (x)].last_set_nonzero_bits; + return NULL; } - if (mode_width > HOST_BITS_PER_WIDE_INT) - /* Our only callers in this case look for single bit values. So - just return the mode mask. Those tests will then be false. */ - return nonzero; - -#ifndef WORD_REGISTER_OPERATIONS - /* If MODE is wider than X, but both are a single word for both the host - and target machines, we can compute this from which bits of the - object might be nonzero in its own mode, taking into account the fact - that on many CISC machines, accessing an object in a wider mode - causes the high-order bits to become undefined. So they are - not known to be zero. */ - - if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode - && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD - && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT - && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x))) - { - nonzero &= nonzero_bits_with_known (x, GET_MODE (x)); - nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)); - return nonzero; - } -#endif + tem = get_last_value (x); - code = GET_CODE (x); - switch (code) + if (tem) { - case REG: -#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) - /* If pointers extend unsigned and this is a pointer in Pmode, say that - all the bits above ptr_mode are known to be zero. */ - if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode - && REG_POINTER (x)) - nonzero &= GET_MODE_MASK (ptr_mode); -#endif - - /* Include declared information about alignment of pointers. */ - /* ??? We don't properly preserve REG_POINTER changes across - pointer-to-integer casts, so we can't trust it except for - things that we know must be pointers. See execute/960116-1.c. */ - if ((x == stack_pointer_rtx - || x == frame_pointer_rtx - || x == arg_pointer_rtx) - && REGNO_POINTER_ALIGN (REGNO (x))) - { - unsigned HOST_WIDE_INT alignment - = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT; - -#ifdef PUSH_ROUNDING - /* If PUSH_ROUNDING is defined, it is possible for the - stack to be momentarily aligned only to that amount, - so we pick the least alignment. */ - if (x == stack_pointer_rtx && PUSH_ARGS) - alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1), - alignment); -#endif - - nonzero &= ~(alignment - 1); - } - - /* If X is a register whose nonzero bits value is current, use it. - Otherwise, if X is a register whose value we can find, use that - value. Otherwise, use the previously-computed global nonzero bits - for this register. */ - - if (reg_stat[REGNO (x)].last_set_value != 0 - && (reg_stat[REGNO (x)].last_set_mode == mode - || (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT - && GET_MODE_CLASS (mode) == MODE_INT)) - && (reg_stat[REGNO (x)].last_set_label == label_tick - || (REGNO (x) >= FIRST_PSEUDO_REGISTER - && REG_N_SETS (REGNO (x)) == 1 - && ! 
REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, - REGNO (x)))) - && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid) - return reg_stat[REGNO (x)].last_set_nonzero_bits & nonzero; - - tem = get_last_value (x); - - if (tem) - { -#ifdef SHORT_IMMEDIATES_SIGN_EXTEND - /* If X is narrower than MODE and TEM is a non-negative - constant that would appear negative in the mode of X, - sign-extend it for use in reg_stat[].nonzero_bits because - some machines (maybe most) will actually do the sign-extension - and this is the conservative approach. - - ??? For 2.5, try to tighten up the MD files in this regard - instead of this kludge. */ - - if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width - && GET_CODE (tem) == CONST_INT - && INTVAL (tem) > 0 - && 0 != (INTVAL (tem) - & ((HOST_WIDE_INT) 1 - << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) - tem = GEN_INT (INTVAL (tem) - | ((HOST_WIDE_INT) (-1) - << GET_MODE_BITSIZE (GET_MODE (x)))); -#endif - return nonzero_bits_with_known (tem, mode) & nonzero; - } - else if (nonzero_sign_valid && reg_stat[REGNO (x)].nonzero_bits) - { - unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits; - - if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width) - /* We don't know anything about the upper bits. */ - mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x)); - return nonzero & mask; - } - else - return nonzero; - - case CONST_INT: #ifdef SHORT_IMMEDIATES_SIGN_EXTEND - /* If X is negative in MODE, sign-extend the value. */ - if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD - && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1)))) - return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width)); -#endif - - return INTVAL (x); - - case MEM: -#ifdef LOAD_EXTEND_OP - /* In many, if not most, RISC machines, reading a byte from memory - zeros the rest of the register. Noticing that fact saves a lot - of extra zero-extends. */ - if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) - nonzero &= GET_MODE_MASK (GET_MODE (x)); -#endif - break; - - case EQ: case NE: - case UNEQ: case LTGT: - case GT: case GTU: case UNGT: - case LT: case LTU: case UNLT: - case GE: case GEU: case UNGE: - case LE: case LEU: case UNLE: - case UNORDERED: case ORDERED: - - /* If this produces an integer result, we know which bits are set. - Code here used to clear bits outside the mode of X, but that is - now done above. */ - - if (GET_MODE_CLASS (mode) == MODE_INT - && mode_width <= HOST_BITS_PER_WIDE_INT) - nonzero = STORE_FLAG_VALUE; - break; - - case NEG: -#if 0 - /* Disabled to avoid exponential mutual recursion between nonzero_bits - and num_sign_bit_copies. */ - if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) - == GET_MODE_BITSIZE (GET_MODE (x))) - nonzero = 1; -#endif - - if (GET_MODE_SIZE (GET_MODE (x)) < mode_width) - nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x))); - break; - - case ABS: -#if 0 - /* Disabled to avoid exponential mutual recursion between nonzero_bits - and num_sign_bit_copies. */ - if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) - == GET_MODE_BITSIZE (GET_MODE (x))) - nonzero = 1; -#endif - break; - - case TRUNCATE: - nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode) - & GET_MODE_MASK (mode)); - break; - - case ZERO_EXTEND: - nonzero &= nonzero_bits_with_known (XEXP (x, 0), mode); - if (GET_MODE (XEXP (x, 0)) != VOIDmode) - nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); - break; - - case SIGN_EXTEND: - /* If the sign bit is known clear, this is the same as ZERO_EXTEND. 
- Otherwise, show all the bits in the outer mode but not the inner - may be nonzero. */ - inner_nz = nonzero_bits_with_known (XEXP (x, 0), mode); - if (GET_MODE (XEXP (x, 0)) != VOIDmode) - { - inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); - if (inner_nz - & (((HOST_WIDE_INT) 1 - << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))) - inner_nz |= (GET_MODE_MASK (mode) - & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); - } - - nonzero &= inner_nz; - break; - - case AND: - nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode) - & nonzero_bits_with_known (XEXP (x, 1), mode)); - break; - - case XOR: case IOR: - case UMIN: case UMAX: case SMIN: case SMAX: - { - unsigned HOST_WIDE_INT nonzero0 = - nonzero_bits_with_known (XEXP (x, 0), mode); - - /* Don't call nonzero_bits for the second time if it cannot change - anything. */ - if ((nonzero & nonzero0) != nonzero) - nonzero &= (nonzero0 - | nonzero_bits_with_known (XEXP (x, 1), mode)); - } - break; - - case PLUS: case MINUS: - case MULT: - case DIV: case UDIV: - case MOD: case UMOD: - /* We can apply the rules of arithmetic to compute the number of - high- and low-order zero bits of these operations. We start by - computing the width (position of the highest-order nonzero bit) - and the number of low-order zero bits for each value. */ - { - unsigned HOST_WIDE_INT nz0 = - nonzero_bits_with_known (XEXP (x, 0), mode); - unsigned HOST_WIDE_INT nz1 = - nonzero_bits_with_known (XEXP (x, 1), mode); - int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1; - int width0 = floor_log2 (nz0) + 1; - int width1 = floor_log2 (nz1) + 1; - int low0 = floor_log2 (nz0 & -nz0); - int low1 = floor_log2 (nz1 & -nz1); - HOST_WIDE_INT op0_maybe_minusp - = (nz0 & ((HOST_WIDE_INT) 1 << sign_index)); - HOST_WIDE_INT op1_maybe_minusp - = (nz1 & ((HOST_WIDE_INT) 1 << sign_index)); - unsigned int result_width = mode_width; - int result_low = 0; - - switch (code) - { - case PLUS: - result_width = MAX (width0, width1) + 1; - result_low = MIN (low0, low1); - break; - case MINUS: - result_low = MIN (low0, low1); - break; - case MULT: - result_width = width0 + width1; - result_low = low0 + low1; - break; - case DIV: - if (width1 == 0) - break; - if (! op0_maybe_minusp && ! op1_maybe_minusp) - result_width = width0; - break; - case UDIV: - if (width1 == 0) - break; - result_width = width0; - break; - case MOD: - if (width1 == 0) - break; - if (! op0_maybe_minusp && ! op1_maybe_minusp) - result_width = MIN (width0, width1); - result_low = MIN (low0, low1); - break; - case UMOD: - if (width1 == 0) - break; - result_width = MIN (width0, width1); - result_low = MIN (low0, low1); - break; - default: - abort (); - } - - if (result_width < mode_width) - nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1; - - if (result_low > 0) - nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1); - -#ifdef POINTERS_EXTEND_UNSIGNED - /* If pointers extend unsigned and this is an addition or subtraction - to a pointer in Pmode, all the bits above ptr_mode are known to be - zero. 
*/ - if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode - && (code == PLUS || code == MINUS) - && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0))) - nonzero &= GET_MODE_MASK (ptr_mode); -#endif - } - break; - - case ZERO_EXTRACT: - if (GET_CODE (XEXP (x, 1)) == CONST_INT - && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) - nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; - break; - - case SUBREG: - /* If this is a SUBREG formed for a promoted variable that has - been zero-extended, we know that at least the high-order bits - are zero, though others might be too. */ - - if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0) - nonzero = (GET_MODE_MASK (GET_MODE (x)) - & nonzero_bits_with_known (SUBREG_REG (x), GET_MODE (x))); - - /* If the inner mode is a single word for both the host and target - machines, we can compute this from which bits of the inner - object might be nonzero. */ - if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD - && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - <= HOST_BITS_PER_WIDE_INT)) - { - nonzero &= nonzero_bits_with_known (SUBREG_REG (x), mode); - -#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP) - /* If this is a typical RISC machine, we only have to worry - about the way loads are extended. */ - if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND - ? (((nonzero - & (((unsigned HOST_WIDE_INT) 1 - << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))) - != 0)) - : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND) - || GET_CODE (SUBREG_REG (x)) != MEM) + /* If X is narrower than MODE and TEM is a non-negative + constant that would appear negative in the mode of X, + sign-extend it for use in reg_nonzero_bits because some + machines (maybe most) will actually do the sign-extension + and this is the conservative approach. + + ??? For 2.5, try to tighten up the MD files in this regard + instead of this kludge. */ + + if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode) + && GET_CODE (tem) == CONST_INT + && INTVAL (tem) > 0 + && 0 != (INTVAL (tem) + & ((HOST_WIDE_INT) 1 + << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) + tem = GEN_INT (INTVAL (tem) + | ((HOST_WIDE_INT) (-1) + << GET_MODE_BITSIZE (GET_MODE (x)))); #endif - { - /* On many CISC machines, accessing an object in a wider mode - causes the high-order bits to become undefined. So they are - not known to be zero. */ - if (GET_MODE_SIZE (GET_MODE (x)) - > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) - nonzero |= (GET_MODE_MASK (GET_MODE (x)) - & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))); - } - } - break; - - case ASHIFTRT: - case LSHIFTRT: - case ASHIFT: - case ROTATE: - /* The nonzero bits are in two classes: any bits within MODE - that aren't in GET_MODE (x) are always significant. The rest of the - nonzero bits are those that are significant in the operand of - the shift when shifted the appropriate number of bits. This - shows that high-order bits are cleared by the right shift and - low-order bits by left shifts. 
*/ - if (GET_CODE (XEXP (x, 1)) == CONST_INT - && INTVAL (XEXP (x, 1)) >= 0 - && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) - { - enum machine_mode inner_mode = GET_MODE (x); - unsigned int width = GET_MODE_BITSIZE (inner_mode); - int count = INTVAL (XEXP (x, 1)); - unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); - unsigned HOST_WIDE_INT op_nonzero = - nonzero_bits_with_known (XEXP (x, 0), mode); - unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; - unsigned HOST_WIDE_INT outer = 0; - - if (mode_width > width) - outer = (op_nonzero & nonzero & ~mode_mask); - - if (code == LSHIFTRT) - inner >>= count; - else if (code == ASHIFTRT) - { - inner >>= count; - - /* If the sign bit may have been nonzero before the shift, we - need to mark all the places it could have been copied to - by the shift as possibly nonzero. */ - if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count))) - inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count); - } - else if (code == ASHIFT) - inner <<= count; - else - inner = ((inner << (count % width) - | (inner >> (width - (count % width)))) & mode_mask); - - nonzero &= (outer | inner); - } - break; - - case FFS: - case POPCOUNT: - /* This is at most the number of bits in the mode. */ - nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1; - break; - - case CLZ: - /* If CLZ has a known value at zero, then the nonzero bits are - that value, plus the number of bits in the mode minus one. */ - if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) - nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; - else - nonzero = -1; - break; - - case CTZ: - /* If CTZ has a known value at zero, then the nonzero bits are - that value, plus the number of bits in the mode minus one. */ - if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) - nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; - else - nonzero = -1; - break; - - case PARITY: - nonzero = 1; - break; - - case IF_THEN_ELSE: - nonzero &= (nonzero_bits_with_known (XEXP (x, 1), mode) - | nonzero_bits_with_known (XEXP (x, 2), mode)); - break; - - default: - break; + return tem; } - - return nonzero; -} - -/* See the macro definition above. */ -#undef cached_num_sign_bit_copies - -#define num_sign_bit_copies_with_known(X, M) \ - cached_num_sign_bit_copies (X, M, known_x, known_mode, known_ret) - -/* The function cached_num_sign_bit_copies is a wrapper around - num_sign_bit_copies1. It avoids exponential behavior in - num_sign_bit_copies1 when X has identical subexpressions on the - first or the second level. */ - -static unsigned int -cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x, - enum machine_mode known_mode, - unsigned int known_ret) -{ - if (x == known_x && mode == known_mode) - return known_ret; - - /* Try to find identical subexpressions. If found call - num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and - the precomputed value for the subexpression as KNOWN_RET. */ - - if (ARITHMETIC_P (x)) + else if (nonzero_sign_valid && reg_stat[REGNO (x)].nonzero_bits) { - rtx x0 = XEXP (x, 0); - rtx x1 = XEXP (x, 1); - - /* Check the first level. */ - if (x0 == x1) - return - num_sign_bit_copies1 (x, mode, x0, mode, - num_sign_bit_copies_with_known (x0, mode)); - - /* Check the second level. 
*/ - if (ARITHMETIC_P (x0) - && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) - return - num_sign_bit_copies1 (x, mode, x1, mode, - num_sign_bit_copies_with_known (x1, mode)); + unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits; - if (ARITHMETIC_P (x1) - && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) - return - num_sign_bit_copies1 (x, mode, x0, mode, - num_sign_bit_copies_with_known (x0, mode)); + if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)) + /* We don't know anything about the upper bits. */ + mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x)); + *nonzero &= mask; } - return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret); + return NULL; } /* Return the number of bits at the high-order end of X that are known to @@ -8575,354 +8080,38 @@ cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x, VOIDmode, X will be used in its own mode. The returned value will always be between 1 and the number of bits in MODE. */ -static unsigned int -num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x, - enum machine_mode known_mode, - unsigned int known_ret) +static rtx +reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode, + rtx known_x ATTRIBUTE_UNUSED, + enum machine_mode known_mode + ATTRIBUTE_UNUSED, + unsigned int known_ret ATTRIBUTE_UNUSED, + unsigned int *result) { - enum rtx_code code = GET_CODE (x); - unsigned int bitwidth; - int num0, num1, result; - unsigned HOST_WIDE_INT nonzero; rtx tem; - /* If we weren't given a mode, use the mode of X. If the mode is still - VOIDmode, we don't know anything. Likewise if one of the modes is - floating-point. */ - - if (mode == VOIDmode) - mode = GET_MODE (x); - - if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))) - return 1; - - bitwidth = GET_MODE_BITSIZE (mode); - - /* For a smaller object, just ignore the high bits. */ - if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x))) + if (reg_stat[REGNO (x)].last_set_value != 0 + && reg_stat[REGNO (x)].last_set_mode == mode + && (reg_stat[REGNO (x)].last_set_label == label_tick + || (REGNO (x) >= FIRST_PSEUDO_REGISTER + && REG_N_SETS (REGNO (x)) == 1 + && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, + REGNO (x)))) + && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid) { - num0 = num_sign_bit_copies_with_known (x, GET_MODE (x)); - return MAX (1, - num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)); - } - - if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x))) - { -#ifndef WORD_REGISTER_OPERATIONS - /* If this machine does not do all register operations on the entire - register and MODE is wider than the mode of X, we can say nothing - at all about the high-order bits. */ - return 1; -#else - /* Likewise on machines that do, if the mode of the object is smaller - than a word and loads of that size don't sign extend, we can say - nothing about the high order bits. */ - if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD -#ifdef LOAD_EXTEND_OP - && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND -#endif - ) - return 1; -#endif + *result = reg_stat[REGNO (x)].last_set_sign_bit_copies; + return NULL; } - switch (code) - { - case REG: + tem = get_last_value (x); + if (tem != 0) + return tem; -#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) - /* If pointers extend signed and this is a pointer in Pmode, say that - all the bits above ptr_mode are known to be sign bit copies. */ - if (! 
POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode - && REG_POINTER (x)) - return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1; -#endif - - if (reg_stat[REGNO (x)].last_set_value != 0 - && reg_stat[REGNO (x)].last_set_mode == mode - && (reg_stat[REGNO (x)].last_set_label == label_tick - || (REGNO (x) >= FIRST_PSEUDO_REGISTER - && REG_N_SETS (REGNO (x)) == 1 - && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, - REGNO (x)))) - && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid) - return reg_stat[REGNO (x)].last_set_sign_bit_copies; - - tem = get_last_value (x); - if (tem != 0) - return num_sign_bit_copies_with_known (tem, mode); - - if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0 - && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth) - return reg_stat[REGNO (x)].sign_bit_copies; - break; - - case MEM: -#ifdef LOAD_EXTEND_OP - /* Some RISC machines sign-extend all loads of smaller than a word. */ - if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) - return MAX (1, ((int) bitwidth - - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1)); -#endif - break; - - case CONST_INT: - /* If the constant is negative, take its 1's complement and remask. - Then see how many zero bits we have. */ - nonzero = INTVAL (x) & GET_MODE_MASK (mode); - if (bitwidth <= HOST_BITS_PER_WIDE_INT - && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) - nonzero = (~nonzero) & GET_MODE_MASK (mode); - - return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); - - case SUBREG: - /* If this is a SUBREG for a promoted object that is sign-extended - and we are looking at it in a wider mode, we know that at least the - high-order bits are known to be sign bit copies. */ - - if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x)) - { - num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), mode); - return MAX ((int) bitwidth - - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1, - num0); - } - - /* For a smaller object, just ignore the high bits. */ - if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))) - { - num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), VOIDmode); - return MAX (1, (num0 - - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - - bitwidth))); - } - -#ifdef WORD_REGISTER_OPERATIONS -#ifdef LOAD_EXTEND_OP - /* For paradoxical SUBREGs on machines where all register operations - affect the entire register, just look inside. Note that we are - passing MODE to the recursive call, so the number of sign bit copies - will remain relative to that mode, not the inner mode. */ - - /* This works only if loads sign extend. Otherwise, if we get a - reload for the inner part, it may be loaded from the stack, and - then we lose all sign bit copies that existed before the store - to the stack. */ - - if ((GET_MODE_SIZE (GET_MODE (x)) - > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) - && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND - && GET_CODE (SUBREG_REG (x)) == MEM) - return num_sign_bit_copies_with_known (SUBREG_REG (x), mode); -#endif -#endif - break; - - case SIGN_EXTRACT: - if (GET_CODE (XEXP (x, 1)) == CONST_INT) - return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1))); - break; - - case SIGN_EXTEND: - return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - + num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode)); - - case TRUNCATE: - /* For a smaller object, just ignore the high bits. 
*/ - num0 = num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode); - return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - - bitwidth))); - - case NOT: - return num_sign_bit_copies_with_known (XEXP (x, 0), mode); - - case ROTATE: case ROTATERT: - /* If we are rotating left by a number of bits less than the number - of sign bit copies, we can just subtract that amount from the - number. */ - if (GET_CODE (XEXP (x, 1)) == CONST_INT - && INTVAL (XEXP (x, 1)) >= 0 - && INTVAL (XEXP (x, 1)) < (int) bitwidth) - { - num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode); - return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) - : (int) bitwidth - INTVAL (XEXP (x, 1)))); - } - break; - - case NEG: - /* In general, this subtracts one sign bit copy. But if the value - is known to be positive, the number of sign bit copies is the - same as that of the input. Finally, if the input has just one bit - that might be nonzero, all the bits are copies of the sign bit. */ - num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode); - if (bitwidth > HOST_BITS_PER_WIDE_INT) - return num0 > 1 ? num0 - 1 : 1; - - nonzero = nonzero_bits (XEXP (x, 0), mode); - if (nonzero == 1) - return bitwidth; - - if (num0 > 1 - && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) - num0--; - - return num0; - - case IOR: case AND: case XOR: - case SMIN: case SMAX: case UMIN: case UMAX: - /* Logical operations will preserve the number of sign-bit copies. - MIN and MAX operations always return one of the operands. */ - num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode); - num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode); - return MIN (num0, num1); - - case PLUS: case MINUS: - /* For addition and subtraction, we can have a 1-bit carry. However, - if we are subtracting 1 from a positive number, there will not - be such a carry. Furthermore, if the positive number is known to - be 0 or 1, we know the result is either -1 or 0. */ - - if (code == PLUS && XEXP (x, 1) == constm1_rtx - && bitwidth <= HOST_BITS_PER_WIDE_INT) - { - nonzero = nonzero_bits (XEXP (x, 0), mode); - if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) - return (nonzero == 1 || nonzero == 0 ? bitwidth - : bitwidth - floor_log2 (nonzero) - 1); - } - - num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode); - num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode); - result = MAX (1, MIN (num0, num1) - 1); - -#ifdef POINTERS_EXTEND_UNSIGNED - /* If pointers extend signed and this is an addition or subtraction - to a pointer in Pmode, all the bits above ptr_mode are known to be - sign bit copies. */ - if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode - && (code == PLUS || code == MINUS) - && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0))) - result = MAX ((int) (GET_MODE_BITSIZE (Pmode) - - GET_MODE_BITSIZE (ptr_mode) + 1), - result); -#endif - return result; - - case MULT: - /* The number of bits of the product is the sum of the number of - bits of both terms. However, unless one of the terms if known - to be positive, we must allow for an additional bit since negating - a negative number can remove one sign bit copy. 
*/ - - num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode); - num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode); - - result = bitwidth - (bitwidth - num0) - (bitwidth - num1); - if (result > 0 - && (bitwidth > HOST_BITS_PER_WIDE_INT - || (((nonzero_bits (XEXP (x, 0), mode) - & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) - && ((nonzero_bits (XEXP (x, 1), mode) - & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)))) - result--; - - return MAX (1, result); - - case UDIV: - /* The result must be <= the first operand. If the first operand - has the high bit set, we know nothing about the number of sign - bit copies. */ - if (bitwidth > HOST_BITS_PER_WIDE_INT) - return 1; - else if ((nonzero_bits (XEXP (x, 0), mode) - & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) - return 1; - else - return num_sign_bit_copies_with_known (XEXP (x, 0), mode); - - case UMOD: - /* The result must be <= the second operand. */ - return num_sign_bit_copies_with_known (XEXP (x, 1), mode); - - case DIV: - /* Similar to unsigned division, except that we have to worry about - the case where the divisor is negative, in which case we have - to add 1. */ - result = num_sign_bit_copies_with_known (XEXP (x, 0), mode); - if (result > 1 - && (bitwidth > HOST_BITS_PER_WIDE_INT - || (nonzero_bits (XEXP (x, 1), mode) - & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) - result--; - - return result; - - case MOD: - result = num_sign_bit_copies_with_known (XEXP (x, 1), mode); - if (result > 1 - && (bitwidth > HOST_BITS_PER_WIDE_INT - || (nonzero_bits (XEXP (x, 1), mode) - & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) - result--; - - return result; - - case ASHIFTRT: - /* Shifts by a constant add to the number of bits equal to the - sign bit. */ - num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode); - if (GET_CODE (XEXP (x, 1)) == CONST_INT - && INTVAL (XEXP (x, 1)) > 0) - num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1))); - - return num0; - - case ASHIFT: - /* Left shifts destroy copies. */ - if (GET_CODE (XEXP (x, 1)) != CONST_INT - || INTVAL (XEXP (x, 1)) < 0 - || INTVAL (XEXP (x, 1)) >= (int) bitwidth) - return 1; - - num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode); - return MAX (1, num0 - INTVAL (XEXP (x, 1))); - - case IF_THEN_ELSE: - num0 = num_sign_bit_copies_with_known (XEXP (x, 1), mode); - num1 = num_sign_bit_copies_with_known (XEXP (x, 2), mode); - return MIN (num0, num1); - - case EQ: case NE: case GE: case GT: case LE: case LT: - case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT: - case GEU: case GTU: case LEU: case LTU: - case UNORDERED: case ORDERED: - /* If the constant is negative, take its 1's complement and remask. - Then see how many zero bits we have. */ - nonzero = STORE_FLAG_VALUE; - if (bitwidth <= HOST_BITS_PER_WIDE_INT - && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) - nonzero = (~nonzero) & GET_MODE_MASK (mode); - - return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); - break; - - default: - break; - } - - /* If we haven't been able to figure it out by one of the above rules, - see if some of the high-order bits are known to be zero. If so, - count those bits and return one less than that amount. If we can't - safely compute the mask for this mode, always return BITWIDTH. */ - - if (bitwidth > HOST_BITS_PER_WIDE_INT) - return 1; - - nonzero = nonzero_bits (x, mode); - return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) - ? 
1 : bitwidth - floor_log2 (nonzero) - 1); + if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0 + && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode)) + *result = reg_stat[REGNO (x)].sign_bit_copies; + + return NULL; } /* Return the number of "extended" bits there are in X, when interpreted |
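The two replacement hooks above share one protocol: either they tighten the caller-supplied result through the out parameter (*nonzero or *result) and return NULL, or they return an equivalent rtx, typically the register's last known value from get_last_value, for the shared analysis code to recurse into. That caller lives outside this file, so the following sketch of how such a return value might be consumed is an assumption about its shape (the helper name is made up), not code from this patch.

/* Hypothetical consumer of the hook protocol above; not part of this
   patch.  The hook's signature is taken from reg_nonzero_bits_for_combine
   as defined above; everything else is illustrative.  */
static unsigned HOST_WIDE_INT
nonzero_bits_of_reg_sketch (rtx x, enum machine_mode mode)
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  rtx tem = reg_nonzero_bits_for_combine (x, mode, NULL_RTX, VOIDmode,
					  0, &nonzero);

  if (tem)
    /* The hook handed back an equivalent expression (the register's
       last known value); recurse on it and keep any narrowing already
       recorded in NONZERO.  */
    return nonzero_bits (tem, mode) & nonzero;

  /* Otherwise the hook has already intersected everything it knows
     about X into NONZERO.  */
  return nonzero;
}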