author     Eric Botcazou <ebotcazou@adacore.com>   2010-10-28 10:32:03 +0000
committer  Eric Botcazou <ebotcazou@gcc.gnu.org>   2010-10-28 10:32:03 +0000
commit     c575a65f66db572f4fbec024465544dbdd647685
tree       55015449f71fc5bb6714771b4207bea7931cf4de  /gcc/combine.c
parent     f228e2ae59f04b71593e448cf4e654f51fe1318a
combine.c (set_nonzero_bits_and_sign_copies): Use unsigned arithmetic in masking operations.
* combine.c (set_nonzero_bits_and_sign_copies): Use unsigned
arithmetic in masking operations.
(contains_muldiv): Likewise.
(try_combine): Likewise.
(find_split_point): Likewise.
(combine_simplify_rtx): Likewise.
(simplify_if_then_else): Likewise.
(simplify_set): Likewise.
(expand_compound_operation): Likewise.
(expand_field_assignment): Likewise.
(make_extraction): Likewise.
(extract_left_shift): Likewise.
(make_compound_operation): Likewise.
(force_to_mode): Likewise.
(make_field_assignment): Likewise.
(reg_nonzero_bits_for_combine): Likewise.
(simplify_shift_const_1): Likewise.
(simplify_comparison): Likewise.
From-SVN: r166027
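The rationale behind the whole patch can be reproduced outside of GCC: building a mask with a signed HOST_WIDE_INT can shift a bit into, or past, the sign position, which is undefined behaviour in C, while the unsigned forms introduced here are fully defined for any shift count below the type width. A minimal standalone sketch, assuming a 64-bit long long as a stand-in for HOST_WIDE_INT (the #defines mirror how the patch spells the types, not GCC's actual host configuration):

```c
#include <stdio.h>

/* Stand-ins for GCC's host-wide integer type; 64 bits is an assumption
   about the host, not something the patch requires.  */
#define HOST_WIDE_INT long long
#define HOST_BITS_PER_WIDE_INT 64

int
main (void)
{
  int len = 63;

  /* Signed form the patch removes: ((HOST_WIDE_INT) 1 << 63) shifts a
     bit into the sign position, which is undefined behaviour in C.  */
  /* HOST_WIDE_INT bad = ((HOST_WIDE_INT) 1 << len) - 1;  */

  /* Unsigned form the patch introduces: well defined for any
     len < HOST_BITS_PER_WIDE_INT.  */
  unsigned HOST_WIDE_INT mask = ((unsigned HOST_WIDE_INT) 1 << len) - 1;

  printf ("mask = %#llx\n", mask);
  return 0;
}
```

With len == 63 the commented-out signed form would already overflow before the subtraction, while the unsigned form evaluates to 0x7fffffffffffffff.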
Diffstat (limited to 'gcc/combine.c')
-rw-r--r--  gcc/combine.c | 222
1 file changed, 112 insertions, 110 deletions
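A second recurring shape in the hunks below is the power-of-two (or power-of-two-minus-one) test: a CONST_INT operand is fed to exact_log2, and the patch consistently passes the unsigned value (UINTVAL) instead of the signed one (INTVAL). The sketch below is only illustrative; exact_log2 here is a simplified stand-in for GCC's helper, and low_bit_mask_len is a made-up name for the recurring exact_log2 (x + 1) idiom.

```c
#define HOST_WIDE_INT long long

/* Simplified stand-in for GCC's exact_log2: return log2 (X) if X is an
   exact power of two, -1 otherwise.  */
int
exact_log2 (unsigned HOST_WIDE_INT x)
{
  int log;

  if (x == 0 || (x & (x - 1)) != 0)
    return -1;
  for (log = 0; (x >>= 1) != 0; log++)
    ;
  return log;
}

/* The recurring test from make_compound_operation and friends: is MASK
   of the form 2**i - 1, and if so, for which i?  Using the unsigned
   value keeps MASK + 1 well defined even when MASK equals the largest
   positive HOST_WIDE_INT.  */
int
low_bit_mask_len (unsigned HOST_WIDE_INT mask)
{
  return exact_log2 (mask + 1);
}
```

For instance, low_bit_mask_len (0xff) is 8 while low_bit_mask_len (0xfe) is -1.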
diff --git a/gcc/combine.c b/gcc/combine.c index 5ec7ee3..88b3ca5 100644 --- a/gcc/combine.c +++ b/gcc/combine.c @@ -1594,11 +1594,11 @@ set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data) if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD && CONST_INT_P (src) && INTVAL (src) > 0 - && 0 != (INTVAL (src) - & ((HOST_WIDE_INT) 1 + && 0 != (UINTVAL (src) + & ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) - src = GEN_INT (INTVAL (src) - | ((HOST_WIDE_INT) (-1) + src = GEN_INT (UINTVAL (src) + | ((unsigned HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (GET_MODE (x)))); #endif @@ -2091,7 +2091,7 @@ contains_muldiv (rtx x) case MULT: return ! (CONST_INT_P (XEXP (x, 1)) - && exact_log2 (INTVAL (XEXP (x, 1))) >= 0); + && exact_log2 (UINTVAL (XEXP (x, 1))) >= 0); default: if (BINARY_P (x)) return contains_muldiv (XEXP (x, 0)) @@ -3486,7 +3486,7 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p) if (split_code == MULT && CONST_INT_P (XEXP (*split, 1)) && INTVAL (XEXP (*split, 1)) > 0 - && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0) + && (i = exact_log2 (UINTVAL (XEXP (*split, 1)))) >= 0) { SUBST (*split, gen_rtx_ASHIFT (split_mode, XEXP (*split, 0), GEN_INT (i))); @@ -4576,7 +4576,8 @@ find_split_point (rtx *loc, rtx insn, bool set_src) unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x)); rtx dest = XEXP (SET_DEST (x), 0); enum machine_mode mode = GET_MODE (dest); - unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1; + unsigned HOST_WIDE_INT mask + = ((unsigned HOST_WIDE_INT) 1 << len) - 1; rtx or_mask; if (BITS_BIG_ENDIAN) @@ -4621,7 +4622,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) if (CONST_INT_P (XEXP (SET_SRC (x), 1)) && REG_P (XEXP (SET_SRC (x), 0)) - && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7 + && (pos = exact_log2 (UINTVAL (XEXP (SET_SRC (x), 1)))) >= 7 && REG_P (SET_DEST (x)) && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE) @@ -4713,7 +4714,8 @@ find_split_point (rtx *loc, rtx insn, bool set_src) gen_rtx_LSHIFTRT (mode, gen_lowpart (mode, inner), GEN_INT (pos)), - GEN_INT (((HOST_WIDE_INT) 1 << len) - 1))); + GEN_INT (((unsigned HOST_WIDE_INT) 1 << len) + - 1))); split = find_split_point (&SET_SRC (x), insn, true); if (split && split != &SET_SRC (x)) @@ -5471,7 +5473,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest) whose value is a comparison can be replaced with a subreg if STORE_FLAG_VALUE permits. 
*/ if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT - && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0 + && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0 && (temp = get_last_value (XEXP (x, 0))) && COMPARISON_P (temp)) return gen_lowpart (mode, XEXP (x, 0)); @@ -5506,13 +5508,13 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest) && CONST_INT_P (XEXP (x, 1)) && CONST_INT_P (XEXP (XEXP (x, 0), 1)) && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1)) - && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0 - || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) + && ((i = exact_log2 (UINTVAL (XEXP (XEXP (x, 0), 1)))) >= 0 + || (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0) && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1)) - && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) - == ((HOST_WIDE_INT) 1 << (i + 1)) - 1)) + && (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) + == ((unsigned HOST_WIDE_INT) 1 << (i + 1)) - 1)) || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) == (unsigned int) i + 1)))) @@ -5564,7 +5566,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest) (and <foo> (const_int pow2-1)) */ if (GET_CODE (XEXP (x, 1)) == AND && CONST_INT_P (XEXP (XEXP (x, 1), 1)) - && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0 + && exact_log2 (-UINTVAL (XEXP (XEXP (x, 1), 1))) >= 0 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0))) return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0), -INTVAL (XEXP (XEXP (x, 1), 1)) - 1); @@ -5598,7 +5600,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest) /* If this is a divide by a power of two, treat it as a shift if its first operand is a shift. 
*/ if (CONST_INT_P (XEXP (x, 1)) - && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0 + && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0 && (GET_CODE (XEXP (x, 0)) == ASHIFT || GET_CODE (XEXP (x, 0)) == LSHIFTRT || GET_CODE (XEXP (x, 0)) == ASHIFTRT @@ -5798,7 +5800,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest) else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1))) SUBST (XEXP (x, 1), force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)), - ((HOST_WIDE_INT) 1 + ((unsigned HOST_WIDE_INT) 1 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x)))) - 1, 0)); @@ -6129,10 +6131,10 @@ simplify_if_then_else (rtx x) if (true_code == NE && XEXP (cond, 1) == const0_rtx && false_rtx == const0_rtx && CONST_INT_P (true_rtx) && ((1 == nonzero_bits (XEXP (cond, 0), mode) - && (i = exact_log2 (INTVAL (true_rtx))) >= 0) + && (i = exact_log2 (UINTVAL (true_rtx))) >= 0) || ((num_sign_bit_copies (XEXP (cond, 0), mode) == GET_MODE_BITSIZE (mode)) - && (i = exact_log2 (-INTVAL (true_rtx))) >= 0))) + && (i = exact_log2 (-UINTVAL (true_rtx))) >= 0))) return simplify_shift_const (NULL_RTX, ASHIFT, mode, gen_lowpart (mode, XEXP (cond, 0)), i); @@ -6141,9 +6143,9 @@ simplify_if_then_else (rtx x) if (true_code == NE && XEXP (cond, 1) == const0_rtx && false_rtx == const0_rtx && CONST_INT_P (true_rtx) && GET_MODE (XEXP (cond, 0)) == mode - && (INTVAL (true_rtx) & GET_MODE_MASK (mode)) + && (UINTVAL (true_rtx) & GET_MODE_MASK (mode)) == nonzero_bits (XEXP (cond, 0), mode) - && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0) + && (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0) return XEXP (cond, 0); return x; @@ -6172,7 +6174,7 @@ simplify_set (rtx x) if (GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) { - src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0); + src = force_to_mode (src, mode, ~(unsigned HOST_WIDE_INT) 0, 0); SUBST (SET_SRC (x), src); } @@ -6717,8 +6719,7 @@ expand_compound_operation (rtx x) && COMPARISON_P (XEXP (XEXP (x, 0), 0)) && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT) - && ((HOST_WIDE_INT) STORE_FLAG_VALUE - & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) + && (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) return XEXP (XEXP (x, 0), 0); /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */ @@ -6728,8 +6729,7 @@ expand_compound_operation (rtx x) && COMPARISON_P (SUBREG_REG (XEXP (x, 0))) && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT) - && ((HOST_WIDE_INT) STORE_FLAG_VALUE - & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) + && (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) return SUBREG_REG (XEXP (x, 0)); } @@ -6766,7 +6766,7 @@ expand_compound_operation (rtx x) simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0), pos), - ((HOST_WIDE_INT) 1 << len) - 1); + ((unsigned HOST_WIDE_INT) 1 << len) - 1); else /* Any other cases we can't handle. */ return x; @@ -6887,7 +6887,7 @@ expand_field_assignment (const_rtx x) /* Now compute the equivalent expression. Make a copy of INNER for the SET_DEST in case it is a MEM into which we will substitute; we don't want shared RTL in that case. 
*/ - mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1); + mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << len) - 1); cleared = simplify_gen_binary (AND, compute_mode, simplify_gen_unary (NOT, compute_mode, simplify_gen_binary (ASHIFT, @@ -6963,7 +6963,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos, else if (GET_CODE (inner) == ASHIFT && CONST_INT_P (XEXP (inner, 1)) && pos_rtx == 0 && pos == 0 - && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1))) + && len > UINTVAL (XEXP (inner, 1))) { /* We're extracting the least significant bits of an rtx (ashift X (const_int C)), where LEN > C. Extract the @@ -7369,7 +7369,8 @@ extract_left_shift (rtx x, int count) /* If we can safely shift this constant and we find the inner shift, make a new operation. */ if (CONST_INT_P (XEXP (x, 1)) - && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0 + && (UINTVAL (XEXP (x, 1)) + & ((((unsigned HOST_WIDE_INT) 1 << count)) - 1)) == 0 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0) return simplify_gen_binary (code, mode, tem, GEN_INT (INTVAL (XEXP (x, 1)) >> count)); @@ -7519,7 +7520,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) /* If the constant is a power of two minus one and the first operand is a logical right shift, make an extraction. */ if (GET_CODE (XEXP (x, 0)) == LSHIFTRT - && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) + && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0) { new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1), i, 1, @@ -7530,7 +7531,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) else if (GET_CODE (XEXP (x, 0)) == SUBREG && subreg_lowpart_p (XEXP (x, 0)) && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT - && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) + && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0) { new_rtx = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0), next_code); @@ -7543,7 +7544,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) || GET_CODE (XEXP (x, 0)) == IOR) && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT - && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) + && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0) { /* Apply the distributive law, and then try to make extractions. */ new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode, @@ -7559,7 +7560,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) else if (GET_CODE (XEXP (x, 0)) == ROTATE && CONST_INT_P (XEXP (XEXP (x, 0), 1)) - && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0 + && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0 && i <= INTVAL (XEXP (XEXP (x, 0), 1))) { new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); @@ -7595,7 +7596,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) representable by an extraction even if no shift is present. If it doesn't end up being a ZERO_EXTEND, we will ignore it unless we are in a COMPARE. */ - else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) + else if ((i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0) new_rtx = make_extraction (mode, make_compound_operation (XEXP (x, 0), next_code), @@ -7604,7 +7605,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) /* If we are in a comparison and this is an AND with a power of two, convert this into the appropriate bit extract. 
*/ else if (in_code == COMPARE - && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) + && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0) new_rtx = make_extraction (mode, make_compound_operation (XEXP (x, 0), next_code), @@ -7685,7 +7686,8 @@ make_compound_operation (rtx x, enum rtx_code in_code) && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (inner)) && subreg_lowpart_p (x)) { - rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0, 0); + rtx newer + = force_to_mode (tem, mode, ~(unsigned HOST_WIDE_INT) 0, 0); /* If we have something other than a SUBREG, we might have done an expansion, so rerun ourselves. */ @@ -8047,16 +8049,17 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, && GET_MODE_MASK (GET_MODE (x)) != mask && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) { - HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1)) - | (GET_MODE_MASK (GET_MODE (x)) & ~mask)); + unsigned HOST_WIDE_INT cval + = UINTVAL (XEXP (x, 1)) + | (GET_MODE_MASK (GET_MODE (x)) & ~mask); int width = GET_MODE_BITSIZE (GET_MODE (x)); rtx y; /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number, sign extend it. */ if (width > 0 && width < HOST_BITS_PER_WIDE_INT - && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) - cval |= (HOST_WIDE_INT) -1 << width; + && (cval & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0) + cval |= (unsigned HOST_WIDE_INT) -1 << width; y = simplify_gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval)); @@ -8084,8 +8087,8 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, number, sign extend it. */ if (width < HOST_BITS_PER_WIDE_INT - && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) - smask |= (HOST_WIDE_INT) -1 << width; + && (smask & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0) + smask |= (unsigned HOST_WIDE_INT) (-1) << width; if (CONST_INT_P (XEXP (x, 1)) && exact_log2 (- smask) >= 0 @@ -8121,8 +8124,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, /* Similarly, if C contains every bit in the fuller_mask, then we may replace with (not Y). */ if (CONST_INT_P (XEXP (x, 0)) - && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask) - == INTVAL (XEXP (x, 0)))) + && ((UINTVAL (XEXP (x, 0)) | fuller_mask) == UINTVAL (XEXP (x, 0)))) { x = simplify_gen_unary (NOT, GET_MODE (x), XEXP (x, 1), GET_MODE (x)); @@ -8147,7 +8149,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, && ((INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (INTVAL (XEXP (x, 1)))) < GET_MODE_BITSIZE (GET_MODE (x))) - && (INTVAL (XEXP (x, 1)) + && (UINTVAL (XEXP (x, 1)) & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) { temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask) @@ -8294,7 +8296,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) { - nonzero = ~(HOST_WIDE_INT) 0; + nonzero = ~(unsigned HOST_WIDE_INT) 0; /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) is the number of bits a full-width mask would have set. @@ -8425,7 +8427,8 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero, which is equal to STORE_FLAG_VALUE. 
*/ - if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx + if ((mask & ~STORE_FLAG_VALUE) == 0 + && XEXP (x, 1) == const0_rtx && GET_MODE (XEXP (x, 0)) == mode && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0 && (nonzero_bits (XEXP (x, 0), mode) @@ -9014,8 +9017,8 @@ make_field_assignment (rtx x) && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1)) - && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1)) - == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1)) + && UINTVAL (XEXP (src, 1)) + == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1) src = XEXP (src, 0); return gen_rtx_SET (VOIDmode, assign, src); @@ -9417,11 +9420,11 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode, if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode) && CONST_INT_P (tem) && INTVAL (tem) > 0 - && 0 != (INTVAL (tem) - & ((HOST_WIDE_INT) 1 + && 0 != (UINTVAL (tem) + & ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) - tem = GEN_INT (INTVAL (tem) - | ((HOST_WIDE_INT) (-1) + tem = GEN_INT (UINTVAL (tem) + | ((unsigned HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (GET_MODE (x)))); #endif return tem; @@ -9815,8 +9818,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT && code == ASHIFTRT && ((nonzero_bits (varop, shift_mode) - & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1))) - == 0)) + & ((unsigned HOST_WIDE_INT) 1 + << (GET_MODE_BITSIZE (shift_mode) - 1))) == 0)) code = LSHIFTRT; if (((code == LSHIFTRT @@ -9889,13 +9892,13 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, is cheaper. But it is still better on those machines to merge two shifts into one. */ if (CONST_INT_P (XEXP (varop, 1)) - && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) + && exact_log2 (UINTVAL (XEXP (varop, 1))) >= 0) { varop = simplify_gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0), GEN_INT (exact_log2 ( - INTVAL (XEXP (varop, 1))))); + UINTVAL (XEXP (varop, 1))))); continue; } break; @@ -9903,13 +9906,13 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, case UDIV: /* Similar, for when divides are cheaper. */ if (CONST_INT_P (XEXP (varop, 1)) - && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) + && exact_log2 (UINTVAL (XEXP (varop, 1))) >= 0) { varop = simplify_gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0), GEN_INT (exact_log2 ( - INTVAL (XEXP (varop, 1))))); + UINTVAL (XEXP (varop, 1))))); continue; } break; @@ -9961,8 +9964,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, { /* C3 has the low-order C1 bits zero. 
*/ - mask = (GET_MODE_MASK (mode) - & ~(((HOST_WIDE_INT) 1 << first_count) - 1)); + mask = GET_MODE_MASK (mode) + & ~(((unsigned HOST_WIDE_INT) 1 << first_count) - 1); varop = simplify_and_const_int (NULL_RTX, result_mode, XEXP (varop, 0), mask); @@ -10184,8 +10187,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT && STORE_FLAG_VALUE == -1 && nonzero_bits (XEXP (varop, 0), result_mode) == 1 - && merge_outer_ops (&outer_op, &outer_const, XOR, - (HOST_WIDE_INT) 1, result_mode, + && merge_outer_ops (&outer_op, &outer_const, XOR, 1, result_mode, &complement_p)) { varop = XEXP (varop, 0); @@ -10209,8 +10211,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, /* NEG commutes with ASHIFT since it is multiplication. Move the NEG outside to allow shifts to combine. */ if (code == ASHIFT - && merge_outer_ops (&outer_op, &outer_const, NEG, - (HOST_WIDE_INT) 0, result_mode, + && merge_outer_ops (&outer_op, &outer_const, NEG, 0, result_mode, &complement_p)) { varop = XEXP (varop, 0); @@ -10226,8 +10227,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, && count == (GET_MODE_BITSIZE (result_mode) - 1) && XEXP (varop, 1) == constm1_rtx && nonzero_bits (XEXP (varop, 0), result_mode) == 1 - && merge_outer_ops (&outer_op, &outer_const, XOR, - (HOST_WIDE_INT) 1, result_mode, + && merge_outer_ops (&outer_op, &outer_const, XOR, 1, result_mode, &complement_p)) { count = 0; @@ -10951,7 +10951,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) else if (const_op == 0 && mode_width <= HOST_BITS_PER_WIDE_INT && (nonzero_bits (op0, mode) - & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) + & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) + == 0) code = EQ; break; @@ -10981,7 +10982,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) else if (const_op == 0 && mode_width <= HOST_BITS_PER_WIDE_INT && (nonzero_bits (op0, mode) - & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) + & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) + == 0) code = NE; break; @@ -10996,8 +10998,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) } /* (unsigned) < 0x80000000 is equivalent to >= 0. */ - else if ((mode_width <= HOST_BITS_PER_WIDE_INT) - && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))) + else if (mode_width <= HOST_BITS_PER_WIDE_INT + && (unsigned HOST_WIDE_INT) const_op + == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)) { const_op = 0, op1 = const0_rtx; code = GE; @@ -11012,8 +11015,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) code = EQ; /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */ - else if ((mode_width <= HOST_BITS_PER_WIDE_INT) - && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)) + else if (mode_width <= HOST_BITS_PER_WIDE_INT + && (unsigned HOST_WIDE_INT) const_op + == ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)) - 1) { const_op = 0, op1 = const0_rtx; code = GE; @@ -11031,8 +11035,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) } /* (unsigned) >= 0x80000000 is equivalent to < 0. 
*/ - else if ((mode_width <= HOST_BITS_PER_WIDE_INT) - && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))) + else if (mode_width <= HOST_BITS_PER_WIDE_INT + && (unsigned HOST_WIDE_INT) const_op + == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)) { const_op = 0, op1 = const0_rtx; code = LT; @@ -11047,8 +11052,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) code = NE; /* (unsigned) > 0x7fffffff is equivalent to < 0. */ - else if ((mode_width <= HOST_BITS_PER_WIDE_INT) - && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)) + else if (mode_width <= HOST_BITS_PER_WIDE_INT + && (unsigned HOST_WIDE_INT) const_op + == ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)) - 1) { const_op = 0, op1 = const0_rtx; code = LT; @@ -11071,8 +11077,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) if (sign_bit_comparison_p && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) op0 = force_to_mode (op0, mode, - ((HOST_WIDE_INT) 1 - << (GET_MODE_BITSIZE (mode) - 1)), + (unsigned HOST_WIDE_INT) 1 + << (GET_MODE_BITSIZE (mode) - 1), 0); /* Now try cases based on the opcode of OP0. If none of the cases @@ -11092,7 +11098,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && CONST_INT_P (XEXP (op0, 0)) && XEXP (op0, 1) == const1_rtx && equality_comparison_p && const_op == 0 - && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0) + && (i = exact_log2 (UINTVAL (XEXP (op0, 0)))) >= 0) { if (BITS_BIG_ENDIAN) { @@ -11168,7 +11174,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && (GET_CODE (XEXP (op0, 0)) == ABS || (mode_width <= HOST_BITS_PER_WIDE_INT && (nonzero_bits (XEXP (op0, 0), mode) - & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0))) + & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) + == 0))) { op0 = XEXP (op0, 0); code = (code == LT ? NE : EQ); @@ -11205,7 +11212,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && mode_width <= HOST_BITS_PER_WIDE_INT) { op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), - ((HOST_WIDE_INT) 1 + ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1 - INTVAL (XEXP (op0, 1))))); code = (code == LT ? NE : EQ); @@ -11382,8 +11389,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) of bits in X minus 1, is one iff X > 0. */ if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT && CONST_INT_P (XEXP (XEXP (op0, 0), 1)) - && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1)) - == mode_width - 1 + && UINTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) { op0 = XEXP (op0, 1); @@ -11429,7 +11435,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT && (STORE_FLAG_VALUE - & (((HOST_WIDE_INT) 1 + & (((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))) && (code == LT || code == GE))) { @@ -11469,11 +11475,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && GET_CODE (XEXP (op0, 0)) == ASHIFT && XEXP (XEXP (op0, 0), 0) == const1_rtx) { - op0 = simplify_and_const_int - (NULL_RTX, mode, gen_rtx_LSHIFTRT (mode, - XEXP (op0, 1), - XEXP (XEXP (op0, 0), 1)), - (HOST_WIDE_INT) 1); + op0 = gen_rtx_LSHIFTRT (mode, XEXP (op0, 1), + XEXP (XEXP (op0, 0), 1)); + op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1); continue; } @@ -11530,7 +11534,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) transformation is invalid. 
*/ if ((equality_comparison_p || unsigned_comparison_p) && CONST_INT_P (XEXP (op0, 1)) - && (i = exact_log2 ((INTVAL (XEXP (op0, 1)) + && (i = exact_log2 ((UINTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) + 1)) >= 0 && const_op >> i == 0 @@ -11590,8 +11594,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && XEXP (op0, 1) == const1_rtx && GET_CODE (XEXP (op0, 0)) == NOT) { - op0 = simplify_and_const_int - (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1); + op0 = simplify_and_const_int (NULL_RTX, mode, + XEXP (XEXP (op0, 0), 0), 1); code = (code == NE ? EQ : NE); continue; } @@ -11611,13 +11615,13 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && CONST_INT_P (XEXP (shift_op, 1)) && CONST_INT_P (shift_count) && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT - && (INTVAL (XEXP (shift_op, 1)) - == (HOST_WIDE_INT) 1 << INTVAL (shift_count)))) + && (UINTVAL (XEXP (shift_op, 1)) + == (unsigned HOST_WIDE_INT) 1 + << INTVAL (shift_count)))) { - op0 = simplify_and_const_int - (NULL_RTX, mode, - gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count), - (HOST_WIDE_INT) 1); + op0 + = gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count); + op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1); code = (code == NE ? EQ : NE); continue; } @@ -11634,8 +11638,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && INTVAL (XEXP (op0, 1)) >= 0 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p) < HOST_BITS_PER_WIDE_INT) - && ((const_op - & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0) + && (((unsigned HOST_WIDE_INT) const_op + & (((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) + - 1)) == 0) && mode_width <= HOST_BITS_PER_WIDE_INT && (nonzero_bits (XEXP (op0, 0), mode) & ~(mask >> (INTVAL (XEXP (op0, 1)) @@ -11657,7 +11662,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) && mode_width <= HOST_BITS_PER_WIDE_INT) { op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), - ((HOST_WIDE_INT) 1 + ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1 - INTVAL (XEXP (op0, 1))))); code = (code == LT ? NE : EQ); @@ -11669,11 +11674,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) low-order bit. */ if (const_op == 0 && equality_comparison_p && CONST_INT_P (XEXP (op0, 1)) - && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) - == mode_width - 1) + && UINTVAL (XEXP (op0, 1)) == mode_width - 1) { - op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), - (HOST_WIDE_INT) 1); + op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), 1); continue; } break; @@ -11780,8 +11783,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) if (const_op == 0 && (equality_comparison_p || sign_bit_comparison_p) && CONST_INT_P (XEXP (op0, 1)) - && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) - == mode_width - 1) + && UINTVAL (XEXP (op0, 1)) == mode_width - 1) { op0 = XEXP (op0, 0); code = (code == NE || code == GT ? LT : GE); @@ -11878,7 +11880,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) { op0 = simplify_gen_binary (AND, tmode, gen_lowpart (tmode, op0), - GEN_INT ((HOST_WIDE_INT) 1 + GEN_INT ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))); code = (code == LT) ? NE : EQ; |
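The hunks in set_nonzero_bits_and_sign_copies, reg_nonzero_bits_for_combine and force_to_mode above share one more idiom: a constant known to fit in a narrow mode whose sign bit is set is widened by OR-ing in all-ones above that width, with the -1 shifted as an unsigned HOST_WIDE_INT so the shift itself is well defined. A standalone sketch of the idiom, again with an assumed 64-bit HOST_WIDE_INT and a made-up function name:

```c
#define HOST_WIDE_INT long long
#define HOST_BITS_PER_WIDE_INT 64

/* Widen VAL, known to occupy the low WIDTH bits, to a full
   HOST_WIDE_INT by replicating its sign bit, the way the patched hunks
   do with (unsigned HOST_WIDE_INT) (-1) << width.  */
unsigned HOST_WIDE_INT
sign_extend_to_hwi (unsigned HOST_WIDE_INT val, int width)
{
  if (width > 0
      && width < HOST_BITS_PER_WIDE_INT
      && (val & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
    val |= (unsigned HOST_WIDE_INT) (-1) << width;
  return val;
}
```

For example, sign_extend_to_hwi (0x80, 8) yields 0xffffffffffffff80, the kind of value the GEN_INT calls in those hunks construct.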