author | Richard Kenner <kenner@gcc.gnu.org> | 1994-04-10 08:03:37 -0400
committer | Richard Kenner <kenner@gcc.gnu.org> | 1994-04-10 08:03:37 -0400
commit | 45620ed410139824655ea4c26db4ad7d8251e21d (patch)
tree | ad096dc10d4fc4f073328d477e2dcf6db96f1ccc /gcc
parent | e5df894b42716006a8fccd922530782b7aaeb745 (diff)
No longer handle LSHIFT.
From-SVN: r7020
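
Background note (not part of the commit): LSHIFT was RTL's logical left-shift code, and it is redundant with ASHIFT because every left shift fills the vacated low-order bits with zeros, whether the operand is viewed as signed or unsigned; only the right-shift codes, LSHIFTRT and ASHIFTRT, genuinely differ. The patch therefore drops the LSHIFT cases and keeps ASHIFT as the single left-shift code. A minimal stand-alone C sketch of that equivalence (illustrative only, not code from GCC):

/* Illustrative only.  A "logical" and an "arithmetic" left shift of the
   same bit pattern are identical, because both insert zeros at the low
   end; the signed/unsigned distinction only matters for right shifts,
   which is why RTL keeps LSHIFTRT and ASHIFTRT but needs just one
   left-shift code, ASHIFT.  */
#include <stdio.h>

static unsigned int
lshiftrt (unsigned int x, int n)        /* logical right shift */
{
  return x >> n;
}

static unsigned int
ashiftrt (unsigned int x, int n)        /* arithmetic right shift, by hand */
{
  unsigned int r = x >> n;
  if (n > 0 && (x & 0x80000000u))       /* replicate the sign bit */
    r |= ~(~0u >> n);
  return r;
}

int
main (void)
{
  unsigned int x = 0xFFFFFFFBu;         /* the bit pattern of -5 in 32 bits */

  /* One left shift: zeros come in from the right either way.  */
  printf ("left shift: %08x\n", x << 1);

  /* Two distinct right shifts: the sign-bit interpretation matters here.  */
  printf ("lshiftrt:   %08x\n", lshiftrt (x, 1));   /* 7ffffffd */
  printf ("ashiftrt:   %08x\n", ashiftrt (x, 1));   /* fffffffd */
  return 0;
}
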
Diffstat (limited to 'gcc')
-rw-r--r-- | gcc/combine.c | 40
-rw-r--r-- | gcc/cse.c | 12
-rw-r--r-- | gcc/function.c | 4
-rw-r--r-- | gcc/genattrtab.c | 3
-rw-r--r-- | gcc/loop.c | 2
-rw-r--r-- | gcc/reload1.c | 4
6 files changed, 20 insertions, 45 deletions
diff --git a/gcc/combine.c b/gcc/combine.c
index 0616e7c..5e69d80 100644
--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -3856,7 +3856,6 @@ simplify_rtx (x, op0_mode, last, in_dest)
       SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
       break;

-    case LSHIFT:
     case ASHIFT:
     case LSHIFTRT:
     case ASHIFTRT:
@@ -5296,7 +5295,6 @@ make_compound_operation (x, in_code)
   switch (code)
     {
     case ASHIFT:
-    case LSHIFT:
       /* Convert shifts by constants into multiplications if inside
          an address.  */
       if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
@@ -5338,7 +5336,7 @@ make_compound_operation (x, in_code)
                                    XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
                                    0, in_code == COMPARE);
         }
-      /* Same as previous, but for (xor/ior (lshift...) (lshift...)).  */
+      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
       else if ((GET_CODE (XEXP (x, 0)) == XOR
                 || GET_CODE (XEXP (x, 0)) == IOR)
                && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
@@ -5811,7 +5809,6 @@ force_to_mode (x, mode, mask, reg, just_select)
       break;

     case ASHIFT:
-    case LSHIFT:
       /* For left shifts, do the same, but just for the first operand.
          However, we cannot do anything with shifts where we cannot
          guarantee that the counts are smaller than the size of the mode
@@ -6458,8 +6455,7 @@ apply_distributive_law (x)
       break;

     case ASHIFT:
-    case LSHIFT:
-      /* These are also multiplies, so they distribute over everything.  */
+      /* This is also a multiply, so it distributes over everything.  */
       break;

     case SUBREG:
@@ -6926,7 +6922,6 @@ nonzero_bits (x, mode)
     case ASHIFTRT:
     case LSHIFTRT:
     case ASHIFT:
-    case LSHIFT:
     case ROTATE:
       /* The nonzero bits are in two classes: any bits within MODE
          that aren't in GET_MODE (x) are always significant.  The rest of the
@@ -6961,7 +6956,7 @@ nonzero_bits (x, mode)
              if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
                inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
            }
-          else if (code == LSHIFT || code == ASHIFT)
+          else if (code == ASHIFT)
            inner <<= count;
          else
            inner = ((inner << (count % width)
@@ -7229,7 +7224,6 @@ num_sign_bit_copies (x, mode)
       return num0;

     case ASHIFT:
-    case LSHIFT:
       /* Left shifts destroy copies.  */
       if (GET_CODE (XEXP (x, 1)) != CONST_INT
           || INTVAL (XEXP (x, 1)) < 0
@@ -7477,10 +7471,6 @@ simplify_shift_const (x, code, result_mode, varop, count)
       if (code == ROTATERT)
        code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;

-      /* Canonicalize LSHIFT to ASHIFT.  */
-      if (code == LSHIFT)
-       code = ASHIFT;
-
       /* We need to determine what mode we will do the shift in.  If the
          shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
          was originally done in.  Otherwise, we can do it in MODE, the widest
@@ -7672,7 +7662,6 @@ simplify_shift_const (x, code, result_mode, varop, count)

        case LSHIFTRT:
        case ASHIFT:
-       case LSHIFT:
        case ROTATE:
          /* Here we have two nested shifts.  The result is usually the AND
             of a new shift with a mask.  We compute the result below.  */
@@ -7687,9 +7676,6 @@ simplify_shift_const (x, code, result_mode, varop, count)
            unsigned HOST_WIDE_INT mask;
            rtx mask_rtx;

-           if (first_code == LSHIFT)
-             first_code = ASHIFT;
-
            /* We have one common special case.  We can't do any merging if
               the inner code is an ASHIFTRT of a smaller mode.
               However, if we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
@@ -7894,11 +7880,11 @@ simplify_shift_const (x, code, result_mode, varop, count)
          break;

        case EQ:
-         /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
+         /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
             says that the sign bit can be tested, FOO has mode MODE, C is
-            GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
-            may be nonzero.  */
-         if (code == LSHIFT
+            GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
+            that may be nonzero.  */
+         if (code == LSHIFTRT
              && XEXP (varop, 1) == const0_rtx
              && GET_MODE (XEXP (varop, 0)) == result_mode
              && count == GET_MODE_BITSIZE (result_mode) - 1
@@ -8517,9 +8503,7 @@ simplify_comparison (code, pop0, pop1)
      if (GET_CODE (op0) == GET_CODE (op1)
          && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
-             || ((GET_CODE (op0) == LSHIFTRT
-                  || GET_CODE (op0) == ASHIFT
-                  || GET_CODE (op0) == LSHIFT)
+             || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
                 && (code != GT && code != LT && code != GE && code != LE))
             || (GET_CODE (op0) == ASHIFTRT
                 && (code != GTU && code != LTU
@@ -8535,7 +8519,7 @@ simplify_comparison (code, pop0, pop1)

          if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
            mask &= (mask >> shift_count) << shift_count;
-         else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
+         else if (GET_CODE (op0) == ASHIFT)
            mask = (mask & (mask << shift_count)) >> shift_count;

          if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
@@ -9145,8 +9129,7 @@ simplify_comparison (code, pop0, pop1)
          /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
             will be converted to a ZERO_EXTRACT later.  */
          if (const_op == 0 && equality_comparison_p
-             && (GET_CODE (XEXP (op0, 0)) == ASHIFT
-                 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
+             && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (XEXP (op0, 0), 0) == const1_rtx)
            {
              op0 = simplify_and_const_int
@@ -9213,8 +9196,7 @@ simplify_comparison (code, pop0, pop1)
          break;

        case ASHIFT:
-       case LSHIFT:
-         /* If we have (compare (xshift FOO N) (const_int C)) and
+         /* If we have (compare (ashift FOO N) (const_int C)) and
             the high order N bits of FOO (N+1 if an inequality comparison)
             are known to be zero, we can do this by comparing FOO with C
             shifted right N bits so long as the low-order N bits of C are
diff --git a/gcc/cse.c b/gcc/cse.c
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -3431,7 +3431,7 @@ simplify_binary_operation (code, mode, op0, op1)
          break;

        case LSHIFTRT: case ASHIFTRT:
-       case ASHIFT:   case LSHIFT:
+       case ASHIFT:
        case ROTATE:   case ROTATERT:
#ifdef SHIFT_COUNT_TRUNCATED
          if (SHIFT_COUNT_TRUNCATED)
@@ -3444,9 +3444,8 @@ simplify_binary_operation (code, mode, op0, op1)
          if (code == LSHIFTRT || code == ASHIFTRT)
            rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
                           code == ASHIFTRT);
-         else if (code == ASHIFT || code == LSHIFT)
-           lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
-                          code == ASHIFT);
+         else if (code == ASHIFT)
+           lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
          else if (code == ROTATE)
            lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
          else /* code == ROTATERT */
@@ -3851,7 +3850,6 @@ simplify_binary_operation (code, mode, op0, op1)

          /* ... fall through ... */

-       case LSHIFT:
        case ASHIFT:
        case ASHIFTRT:
        case LSHIFTRT:
@@ -3993,7 +3991,6 @@ simplify_binary_operation (code, mode, op0, op1)
          break;

        case ASHIFT:
-       case LSHIFT:
          if (arg1 < 0)
            return 0;
@@ -4816,8 +4813,7 @@ fold_rtx (x, insn)
                 to compute that in SImode, because a 32-bit shift in SImode is
                 unpredictable.  We know the value is 0.  */
              if (op0 && op1
-                 && (GET_CODE (elt->exp) == ASHIFT
-                     || GET_CODE (elt->exp) == LSHIFT)
+                 && GET_CODE (elt->exp) == ASHIFT
                  && GET_CODE (op1) == CONST_INT
                  && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
                {
diff --git a/gcc/function.c b/gcc/function.c
index ca6eb6f..4f40d0b 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -2719,8 +2719,8 @@ instantiate_virtual_regs_1 (loc, object, extra_insns)
    case DIV:      case UDIV:     case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
-   case LSHIFT:   case ASHIFT:   case ROTATE:
-   case ASHIFTRT: case LSHIFTRT: case ROTATERT:
+   case ROTATERT: case ROTATE:
+   case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:      case GTU:
    case LE:       case LT:       case LEU:      case LTU:
diff --git a/gcc/genattrtab.c b/gcc/genattrtab.c
index b5821d1..bacdafb 100644
--- a/gcc/genattrtab.c
+++ b/gcc/genattrtab.c
@@ -4287,7 +4287,7 @@ write_test_expr (exp, in_comparison)
    case PLUS: case MINUS: case MULT: case DIV: case MOD:
    case AND:  case IOR:   case XOR:
-   case LSHIFT: case ASHIFT: case LSHIFTRT: case ASHIFTRT:
+   case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      write_test_expr (XEXP (exp, 0), in_comparison || comparison_operator);
      switch (code)
        {
@@ -4351,7 +4351,6 @@ write_test_expr (exp, in_comparison)
        case XOR:
          printf (" ^ ");
          break;
-       case LSHIFT:
        case ASHIFT:
          printf (" << ");
          break;
diff --git a/gcc/loop.c b/gcc/loop.c
--- a/gcc/loop.c
+++ b/gcc/loop.c
@@ -2470,7 +2470,6 @@ mark_loop_jump (x, loop_num)
    case PLUS:
    case MINUS:
    case MULT:
-   case LSHIFT:
      mark_loop_jump (XEXP (x, 0), loop_num);
      mark_loop_jump (XEXP (x, 1), loop_num);
      return;
@@ -5165,7 +5164,6 @@ simplify_giv_expr (x, benefit)
        }

    case ASHIFT:
-   case LSHIFT:
      /* Shift by constant is multiply by power of two.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        return 0;
diff --git a/gcc/reload1.c b/gcc/reload1.c
index 2ff7fd4..d25dc73 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -2928,8 +2928,8 @@ eliminate_regs (x, mem_mode, insn)
    case DIV:      case UDIV:     case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
-   case LSHIFT:   case ASHIFT:   case ROTATE:
-   case ASHIFTRT: case LSHIFTRT: case ROTATERT:
+   case ROTATERT: case ROTATE:
+   case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:      case GTU:
    case LE:       case LT:       case LEU:      case LTU:
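
Several of the hunks above lean on the same identity: a left shift by a constant is a multiplication by a power of two. make_compound_operation rewrites such shifts inside addresses as MULTs, apply_distributive_law treats ASHIFT as a multiply that distributes over everything, and simplify_giv_expr in loop.c uses the same fact when simplifying givs. A small stand-alone check of the identity (illustrative only; the variable names are made up for the example):

/* Illustrative only: (x << n) and x * (1 << n) compute the same value,
   which is the fact the combine.c and loop.c hunks above rely on when
   they rewrite an ASHIFT by a constant as a MULT.  */
#include <assert.h>
#include <stdio.h>

int
main (void)
{
  unsigned int x = 0x1234u;

  for (int n = 0; n < 16; n++)
    assert ((x << n) == x * (1u << n));

  /* A shift distributes over PLUS like any multiply (modulo the word
     size), and over IOR as well, since each bit moves independently.  */
  unsigned int a = 7, b = 9, n = 3;
  assert (((a + b) << n) == (a << n) + (b << n));
  assert (((a | b) << n) == ((a << n) | (b << n)));

  printf ("shift-as-multiply identities hold\n");
  return 0;
}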
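
The simplify_comparison hunk that now tests only for ASHIFT rewrites a bit test of the form ((1 << X) & Y) == 0 into ((Y >> X) & 1) == 0, which can later become a ZERO_EXTRACT. The two forms agree because shifting Y right by X brings bit X of Y into the low-order position; a brief illustration (not GCC code):

/* Illustrative only: testing bit X of Y via (1 << X) & Y is the same as
   shifting Y right by X and looking at the low-order bit, which is the
   form combine.c prefers because it maps onto ZERO_EXTRACT.  */
#include <assert.h>

int
main (void)
{
  unsigned int y = 0xA5A5A5A5u;

  for (unsigned int x = 0; x < 32; x++)
    {
      int via_mask  = ((1u << x) & y) != 0;
      int via_shift = ((y >> x) & 1u) != 0;
      assert (via_mask == via_shift);
    }
  return 0;
}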
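
The EQ case in simplify_shift_const, now keyed on LSHIFTRT instead of the removed LSHIFT, rests on this reasoning: when STORE_FLAG_VALUE has the sign bit set (for instance -1), (eq FOO 0) produces a value whose sign bit is set exactly when FOO is zero, so a logical right shift by GET_MODE_BITSIZE (MODE) - 1 yields 1 for FOO == 0 and 0 otherwise; if only the low-order bit of FOO can be nonzero, that result is simply FOO ^ 1. A sketch under the assumption of a 32-bit mode and STORE_FLAG_VALUE == -1 (illustrative only):

/* Illustrative only, assuming a 32-bit mode and STORE_FLAG_VALUE == -1:
   (lshiftrt (eq FOO 0) 31) computes the same thing as (xor FOO 1) when
   FOO is known to be 0 or 1.  */
#include <assert.h>

int
main (void)
{
  for (unsigned int foo = 0; foo <= 1; foo++)
    {
      /* (eq FOO 0) with STORE_FLAG_VALUE == -1: all bits set when true.  */
      unsigned int flag = (foo == 0) ? 0xFFFFFFFFu : 0u;

      /* Logical right shift by bitsize - 1 isolates the sign bit.  */
      unsigned int shifted = flag >> 31;

      assert (shifted == (foo ^ 1u));
    }
  return 0;
}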