author     Richard Kenner <kenner@gcc.gnu.org>  1992-06-20 06:51:46 -0400
committer  Richard Kenner <kenner@gcc.gnu.org>  1992-06-20 06:51:46 -0400
commit     77fa0940a0a588210c367358e57154c3b5f07167 (patch)
tree       76c843976b76720a8a8745346d8bf4f8568746e2 /gcc
parent     54d8c2432f1b93a4a439786a6d193a1ca0b90af6 (diff)
*** empty log message ***
From-SVN: r1223
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/combine.c                52
-rw-r--r--  gcc/config/romp/romp.md       6
-rw-r--r--  gcc/config/rs6000/rs6000.md  20
-rw-r--r--  gcc/cse.c                    67
-rw-r--r--  gcc/expr.c                    8
-rw-r--r--  gcc/stor-layout.c            31
-rw-r--r--  gcc/varasm.c                 36
7 files changed, 147 insertions, 73 deletions
diff --git a/gcc/combine.c b/gcc/combine.c
index 6f5c5e9..6026d45 100644
--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -356,6 +356,7 @@ static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
+static rtx force_to_mode ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
@@ -1295,7 +1296,8 @@ try_combine (i3, i2, i1)
if (undobuf.other_insn == 0
&& (cc_use = find_single_use (SET_DEST (newpat), i3,
&undobuf.other_insn))
- && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use), i2src))
+ && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
+ i2src, const0_rtx))
!= GET_MODE (SET_DEST (newpat))))
{
int regno = REGNO (SET_DEST (newpat));
@@ -2428,6 +2430,11 @@ subst (x, from, to, in_dest, unique_copy)
case '<':
temp = simplify_relational_operation (code, op0_mode,
XEXP (x, 0), XEXP (x, 1));
+#ifdef FLOAT_STORE_FLAG_VALUE
+ if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
+ temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
+ : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
+#endif
break;
case 'c':
case '2':
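
The new FLOAT_STORE_FLAG_VALUE case covers targets where a comparison result can live in a floating-point mode: a folded relational must then become either 0.0 or the target's FLOAT_STORE_FLAG_VALUE rather than an integer constant. A minimal C sketch of that convention, assuming a hypothetical target that uses 1.0 for "true" (not GCC code):

#include <stdio.h>

/* Hypothetical target convention: 1.0 means "true".  */
#define FLOAT_STORE_FLAG_VALUE 1.0

/* A comparison whose result lives in a floating mode stores 0.0 for
   false and FLOAT_STORE_FLAG_VALUE for true, which is why the folded
   relational must be rewritten as one of those two constants.  */
static double store_flag (int cond)
{
  return cond ? FLOAT_STORE_FLAG_VALUE : 0.0;
}

int main (void)
{
  printf ("%g %g\n", store_flag (3 < 4), store_flag (4 < 3));
  return 0;
}
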
@@ -3194,7 +3201,7 @@ subst (x, from, to, in_dest, unique_copy)
#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
/* If this machine has CC modes other than CCmode, check to see
if we need to use a different CC mode here. */
- compare_mode = SELECT_CC_MODE (new_code, op0);
+ compare_mode = SELECT_CC_MODE (new_code, op0, op1);
/* If the mode changed, we have to change SET_DEST, the mode
in the compare, and the mode in the place SET_DEST is used.
@@ -3636,25 +3643,6 @@ subst (x, from, to, in_dest, unique_copy)
case ASHIFTRT:
case ROTATE:
case ROTATERT:
-#ifdef SHIFT_COUNT_TRUNCATED
- /* (*shift <X> (sign_extend <Y>)) = (*shift <X> <Y>) (most machines).
- True for all kinds of shifts and also for zero_extend. */
- if ((GET_CODE (XEXP (x, 1)) == SIGN_EXTEND
- || GET_CODE (XEXP (x, 1)) == ZERO_EXTEND)
- && FAKE_EXTEND_SAFE_P (mode, XEXP (XEXP (x, 1), 0)))
- SUBST (XEXP (x, 1),
- /* This is a perverse SUBREG, wider than its base. */
- gen_lowpart_for_combine (mode, XEXP (XEXP (x, 1), 0)));
-
- /* tege: Change (bitshifts ... (and ... mask), c)
- to (bitshifts ... c) if mask just masks the bits the bitshift
- insns do automatically on this machine. */
- if (GET_CODE (XEXP (x, 1)) == AND
- && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
- && (~ INTVAL (XEXP (XEXP (x, 1), 1)) & GET_MODE_MASK (mode)) == 0)
- SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
-#endif
-
/* If this is a shift by a constant amount, simplify it. */
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
{
@@ -3663,6 +3651,15 @@ subst (x, from, to, in_dest, unique_copy)
if (GET_CODE (x) != code)
goto restart;
}
+
+#ifdef SHIFT_COUNT_TRUNCATED
+ else if (GET_CODE (XEXP (x, 1)) != REG)
+ SUBST (XEXP (x, 1),
+ force_to_mode (XEXP (x, 1), GET_MODE (x),
+ exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
+ 0));
+#endif
+
break;
}
@@ -4011,6 +4008,15 @@ make_extraction (mode, inner, pos, pos_rtx, len,
MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
}
+ else if (GET_CODE (inner) == REG)
+ /* We can't call gen_lowpart_for_combine here since we always want
+ a SUBREG and it would sometimes return a new hard register. */
+ new = gen_rtx (SUBREG, tmode, inner,
+ (WORDS_BIG_ENDIAN
+ && GET_MODE_SIZE (is_mode) > UNITS_PER_WORD)
+ ? ((GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (tmode))
+ / UNITS_PER_WORD)
+ : 0);
else
new = gen_lowpart_for_combine (tmode, inner);
@@ -4019,7 +4025,9 @@ make_extraction (mode, inner, pos, pos_rtx, len,
if (in_dest)
return (GET_CODE (new) == MEM ? new
- : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new));
+ : (GET_CODE (new) != SUBREG
+ ? gen_rtx (CLOBBER, tmode, const0_rtx)
+ : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
/* Otherwise, sign- or zero-extend unless we already are in the
proper mode. */
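
The SHIFT_COUNT_TRUNCATED rework above replaces the two ad-hoc patterns with a single call to the new force_to_mode: on machines whose shifter ignores all but the low log2(bitsize) bits of the count, any non-register count expression can be narrowed to just those bits. A minimal C sketch of the underlying equivalence, assuming a 32-bit machine with a 5-bit shifter (not GCC code):

#include <stdio.h>
#include <stdint.h>

/* Model a machine whose shifter looks only at the low 5 bits of the
   count, i.e. one where SHIFT_COUNT_TRUNCATED holds for SImode.  */
static uint32_t shl_truncated (uint32_t x, unsigned count)
{
  return x << (count & 31);
}

int main (void)
{
  uint32_t x = 0xdeadbeef;
  unsigned count = 3;

  /* (ashift x (and count 31)) and (ashift x count) agree on such a
     machine, so combine may reduce the count expression to its low
     log2 (bitsize) bits.  */
  printf ("%08x %08x\n",
          (unsigned) shl_truncated (x, count & 31),
          (unsigned) shl_truncated (x, count));
  return 0;
}
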
diff --git a/gcc/config/romp/romp.md b/gcc/config/romp/romp.md
index e2e2c81..bf59f5a 100644
--- a/gcc/config/romp/romp.md
+++ b/gcc/config/romp/romp.md
@@ -1602,7 +1602,7 @@
(define_insn "ashrsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r")
(ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0")
- (match_operand:QI 2 "reg_or_cint_operand" "r,n")))]
+ (match_operand:SI 2 "reg_or_cint_operand" "r,n")))]
""
"@
sar %0,%2
@@ -1612,7 +1612,7 @@
(define_insn "lshrsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r")
(lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0")
- (match_operand:QI 2 "reg_or_cint_operand" "r,n")))]
+ (match_operand:SI 2 "reg_or_cint_operand" "r,n")))]
""
"@
sr %0,%2
@@ -1631,7 +1631,7 @@
(define_insn "ashlsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r")
(ashift:SI (match_operand:SI 1 "register_operand" "0,0")
- (match_operand:QI 2 "reg_or_cint_operand" "r,n")))]
+ (match_operand:SI 2 "reg_or_cint_operand" "r,n")))]
""
"@
sl %0,%2
diff --git a/gcc/config/rs6000/rs6000.md b/gcc/config/rs6000/rs6000.md
index d3e6bce..abbdf5b 100644
--- a/gcc/config/rs6000/rs6000.md
+++ b/gcc/config/rs6000/rs6000.md
@@ -2083,6 +2083,26 @@
mt%0 %1"
[(set_attr "type" "*,load,*,*,*,*,*,mtlr")])
+;; Split a load of a large constant into the appropriate two-insn
+;; sequence.
+
+(define_split
+ [(set (match_operand:SI 0 "gpc_reg_operand" "")
+ (match_operand:SI 1 "const_int_operand" ""))]
+ "(unsigned) (INTVAL (operands[1]) + 0x8000) >= 0x10000
+ && (INTVAL (operands[1]) & 0xffff) != 0"
+ [(set (match_dup 0)
+ (match_dup 2))
+ (set (match_dup 0)
+ (ior:SI (match_dup 0)
+ (match_dup 3)))]
+ "
+{
+ operands[2] = gen_rtx (CONST_INT, VOIDmode,
+ INTVAL (operands[1]) & 0xffff0000);
+ operands[3] = gen_rtx (CONST_INT, VOIDmode, INTVAL (operands[1]) & 0xffff);
+}")
+
(define_insn ""
[(set (match_operand:CC 2 "cc_reg_operand" "=x")
(compare:CC (match_operand:SI 1 "gpc_reg_operand" "r")
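
The define_split's condition accepts constants that fit neither a single signed 16-bit immediate load (the (unsigned) (INTVAL + 0x8000) >= 0x10000 test) nor a pure high-half load (the low 16 bits are nonzero), and rebuilds them as a high-half set followed by an OR of the low half. A small C sketch of the arithmetic, with a hypothetical constant (not GCC code):

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  uint32_t v = 0x12345678;  /* hypothetical constant to load */

  /* The split condition: v does not fit a signed 16-bit immediate and
     its low half is nonzero, so two insns are needed.  */
  if ((uint32_t) (v + 0x8000) >= 0x10000 && (v & 0xffff) != 0)
    {
      uint32_t high = v & 0xffff0000u;  /* insn 1: set the high half */
      uint32_t low  = v & 0x0000ffffu;  /* insn 2: OR in the low half */

      printf ("%08x = %08x | %04x\n",
              (unsigned) (high | low), (unsigned) high, (unsigned) low);
    }
  return 0;
}
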
diff --git a/gcc/cse.c b/gcc/cse.c
index d3a89d2..e945768 100644
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -2379,7 +2379,7 @@ canon_reg (x, insn)
&& (((REGNO (new) < FIRST_PSEUDO_REGISTER)
!= (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
|| (insn != 0 && insn_n_dups[recog_memoized (insn)] > 0)))
- validate_change (insn, &XEXP (x, i), new, 0);
+ validate_change (insn, &XEXP (x, i), new, 1);
else
XEXP (x, i) = new;
}
@@ -5359,6 +5359,7 @@ cse_insn (insn, in_libcall_block)
else if (GET_CODE (SET_SRC (x)) == CALL)
{
canon_reg (SET_SRC (x), insn);
+ apply_change_group ();
fold_rtx (SET_SRC (x), insn);
invalidate (SET_DEST (x));
}
@@ -5400,6 +5401,7 @@ cse_insn (insn, in_libcall_block)
if (GET_CODE (SET_SRC (y)) == CALL)
{
canon_reg (SET_SRC (y), insn);
+ apply_change_group ();
fold_rtx (SET_SRC (y), insn);
invalidate (SET_DEST (y));
}
@@ -5428,6 +5430,7 @@ cse_insn (insn, in_libcall_block)
else if (GET_CODE (y) == CALL)
{
canon_reg (y, insn);
+ apply_change_group ();
fold_rtx (y, insn);
}
}
@@ -5449,6 +5452,7 @@ cse_insn (insn, in_libcall_block)
else if (GET_CODE (x) == CALL)
{
canon_reg (x, insn);
+ apply_change_group ();
fold_rtx (x, insn);
}
@@ -5467,20 +5471,9 @@ cse_insn (insn, in_libcall_block)
we don't break the duplicate nature of the pattern. So we will replace
both operands at the same time. Otherwise, we would fail to find an
equivalent substitution in the loop calling validate_change below.
- (We also speed up that loop when a canonicalization was done since
- recog_memoized need not be called for just a canonicalization unless
- a pseudo register is being replaced by a hard reg of vice versa.)
We used to suppress canonicalization of DEST if it appears in SRC,
- but we don't do this any more.
-
- ??? The way this code is written now, if we have a MATCH_DUP between
- two operands that are pseudos and we would want to canonicalize them
- to a hard register, we won't do that. The only time this would happen
- is if the hard reg was a fixed register, and this should be rare.
-
- ??? This won't work if there is a MATCH_DUP between an input and an
- output, but these never worked and must be declared invalid. */
+ but we don't do this any more. */
for (i = 0; i < n_sets; i++)
{
@@ -5488,19 +5481,20 @@ cse_insn (insn, in_libcall_block)
rtx src = SET_SRC (sets[i].rtl);
rtx new = canon_reg (src, insn);
- if (GET_CODE (new) == REG && GET_CODE (src) == REG
- && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
- != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
- validate_change (insn, &SET_SRC (sets[i].rtl), new, 0);
+ if ((GET_CODE (new) == REG && GET_CODE (src) == REG
+ && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
+ != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
+ || insn_n_dups[recog_memoized (insn)] > 0)
+ validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
else
SET_SRC (sets[i].rtl) = new;
if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
{
validate_change (insn, &XEXP (dest, 1),
- canon_reg (XEXP (dest, 1), insn), 0);
+ canon_reg (XEXP (dest, 1), insn), 1);
validate_change (insn, &XEXP (dest, 2),
- canon_reg (XEXP (dest, 2), insn), 0);
+ canon_reg (XEXP (dest, 2), insn), 1);
}
while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
@@ -5512,6 +5506,14 @@ cse_insn (insn, in_libcall_block)
canon_reg (dest, insn);
}
+ /* Now that we have done all the replacements, we can apply the change
+ group and see if they all work. Note that this will cause some
+ canonicalizations that would have worked individually not to be applied
+ because some other canonicalization didn't work, but this should not
+ occur often. */
+
+ apply_change_group ();
+
/* Set sets[i].src_elt to the class each source belongs to.
Detect assignments from or to volatile things
and set set[i] to zero so they will be ignored
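
The pattern adopted throughout cse_insn above — validate_change with a final argument of 1, followed later by a single apply_change_group — queues replacements and commits or rolls back the whole set at once, so a canonicalization that fails recognition cannot leave the insn half-rewritten. A simplified sketch of that batching idea in plain C (an illustration, not GCC's implementation):

#include <stddef.h>
#include <stdio.h>

struct change { int *loc; int new_val; int old_val; };

static struct change queue[16];
static size_t n_queued;

/* The in_group == 1 case: record the change and install it tentatively.  */
static void queue_change (int *loc, int new_val)
{
  queue[n_queued].loc = loc;
  queue[n_queued].old_val = *loc;
  queue[n_queued].new_val = new_val;
  *loc = new_val;
  n_queued++;
}

/* Stand-in for re-recognizing the insn; here, accept only all-positive.  */
static int whole_group_valid (void)
{
  for (size_t i = 0; i < n_queued; i++)
    if (*queue[i].loc <= 0)
      return 0;
  return 1;
}

/* The apply_change_group analogue: commit all changes, or undo them all.  */
static void commit_or_undo (void)
{
  if (!whole_group_valid ())
    for (size_t i = 0; i < n_queued; i++)
      *queue[i].loc = queue[i].old_val;
  n_queued = 0;
}

int main (void)
{
  int a = 1, b = 2;
  queue_change (&a, 5);
  queue_change (&b, -3);      /* invalid: forces the group to be undone */
  commit_or_undo ();
  printf ("%d %d\n", a, b);   /* prints "1 2": both changes rolled back */
  return 0;
}
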
@@ -6294,11 +6296,16 @@ cse_insn (insn, in_libcall_block)
sets[i].src_elt = src_eqv_elt;
invalidate_from_clobbers (&writes_memory, x);
- /* Memory, and some registers, are invalidate by subroutine calls. */
+
+ /* Some registers are invalidated by subroutine calls. Memory is
+ invalidated by non-constant calls. */
+
if (GET_CODE (insn) == CALL_INSN)
{
static struct write_data everything = {0, 1, 1, 1};
- invalidate_memory (&everything);
+
+ if (! CONST_CALL_P (insn))
+ invalidate_memory (&everything);
invalidate_for_call ();
}
@@ -7672,7 +7679,7 @@ delete_dead_from_cse (insns, nreg)
int nreg;
{
int *counts = (int *) alloca (nreg * sizeof (int));
- rtx insn;
+ rtx insn, prev;
rtx tem;
int i;
int in_libcall = 0;
@@ -7685,14 +7692,16 @@ delete_dead_from_cse (insns, nreg)
/* Go from the last insn to the first and delete insns that only set unused
registers or copy a register to itself. As we delete an insn, remove
usage counts for registers it uses. */
- for (insn = prev_real_insn (get_last_insn ());
- insn; insn = prev_real_insn (insn))
+ for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
{
int live_insn = 0;
+ prev = prev_real_insn (insn);
+
/* Don't delete any insns that are part of a libcall block.
- Flow or loop might get confused if we did that. */
- if (find_reg_note (insn, REG_LIBCALL, 0))
+ Flow or loop might get confused if we did that. Remember
+ that we are scanning backwards. */
+ if (find_reg_note (insn, REG_RETVAL, 0))
in_libcall = 1;
if (in_libcall)
@@ -7754,12 +7763,10 @@ delete_dead_from_cse (insns, nreg)
if (! live_insn)
{
count_reg_usage (insn, counts, -1);
- PUT_CODE (insn, NOTE);
- NOTE_SOURCE_FILE (insn) = 0;
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ delete_insn (insn);
}
- if (find_reg_note (insn, REG_RETVAL, 0))
+ if (find_reg_note (insn, REG_LIBCALL, 0))
in_libcall = 0;
}
}
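
Two things change in delete_dead_from_cse: the predecessor is now captured before the current insn is acted on (delete_insn can splice the insn out of the chain, so calling prev_real_insn (insn) afterwards is unsafe), and the REG_RETVAL/REG_LIBCALL tests are swapped because the scan runs backwards, so a libcall block is entered at its REG_RETVAL end and left at its REG_LIBCALL end. A minimal C sketch of the save-the-predecessor-first pattern (not GCC code):

#include <stdio.h>
#include <stdlib.h>

struct node { int dead; struct node *prev; };

/* Walk a chain backwards, deleting dead nodes: the predecessor must be
   read before the node is freed, since n->prev is gone afterwards.  */
static void sweep (struct node *last)
{
  struct node *n, *prev;

  for (n = last; n != NULL; n = prev)
    {
      prev = n->prev;           /* save before possibly freeing n */
      if (n->dead)
        free (n);
    }
}

int main (void)
{
  struct node *a = malloc (sizeof *a);
  struct node *b = malloc (sizeof *b);

  a->dead = 0; a->prev = NULL;
  b->dead = 1; b->prev = a;     /* chain: a <- b */
  sweep (b);
  printf ("a survives: dead = %d\n", a->dead);
  free (a);
  return 0;
}
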
diff --git a/gcc/expr.c b/gcc/expr.c
index 36e3adc..4cdbd76 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -6318,9 +6318,11 @@ compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
/* If this is a signed equality comparison, we can do it as an
unsigned comparison since zero-extension is cheaper than sign
- extension and comparisons with zero are done as unsigned. If we
- are comparing against a constant, we must convert it to what it
- would look like unsigned. */
+ extension and comparisons with zero are done as unsigned. This is
+ the case even on machines that can do fast sign extension, since
+ zero-extension is easier to combine with other operations than
+ sign-extension is. If we are comparing against a constant, we must
+ convert it to what it would look like unsigned. */
if ((code == EQ || code == NE) && ! unsignedp
&& GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_INT)
{
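
The expanded comment's claim is easy to check: EQ and NE are insensitive to signedness, so the comparison can be done unsigned, and a signed constant operand only needs to be reinterpreted in its unsigned form. A short C illustration (not GCC code):

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  int32_t x = -1, c = -1;  /* c: a signed constant operand */

  /* EQ/NE are signedness-blind: the signed test and the unsigned test,
     with the constant reinterpreted (here -1 becomes 0xffffffff), give
     the same answer.  */
  printf ("%d %d\n", x == c, (uint32_t) x == (uint32_t) c);
  return 0;
}
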
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index dc62a6d..16d1dab 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -952,9 +952,9 @@ fixup_unsigned_type (type)
VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
mode meeting these conditions.
- Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), if a mode
- whose size is UNITS_PER_WORD meets all the conditions, it is returned
- instead. */
+ Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
+ the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
+ all the conditions. */
enum machine_mode
get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
@@ -987,12 +987,25 @@ get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
|| (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
return VOIDmode;
- if (SLOW_BYTE_ACCESS
- && ! volatilep
- && BITS_PER_WORD <= MIN (align, BIGGEST_ALIGNMENT)
- && (largest_mode == VOIDmode
- || BITS_PER_WORD <= GET_MODE_BITSIZE (largest_mode)))
- return word_mode;
+ if (SLOW_BYTE_ACCESS && ! volatilep)
+ {
+ enum machine_mode wide_mode = VOIDmode, tmode;
+
+ for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
+ tmode = GET_MODE_WIDER_MODE (tmode))
+ {
+ unit = GET_MODE_BITSIZE (tmode);
+ if (bitpos / unit == (bitpos + bitsize - 1) / unit
+ && unit <= BITS_PER_WORD
+ && unit <= MIN (align, BIGGEST_ALIGNMENT)
+ && (largest_mode == VOIDmode
+ || unit <= GET_MODE_BITSIZE (largest_mode)))
+ wide_mode = tmode;
+ }
+
+ if (wide_mode != VOIDmode)
+ return wide_mode;
+ }
return mode;
}
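
On SLOW_BYTE_ACCESS targets the rewritten get_best_mode no longer jumps straight to word_mode; it scans the integer modes from narrowest to widest and keeps the widest one, up to the word size and the available alignment, in which the whole bit range still lies inside one unit — the bitpos / unit == (bitpos + bitsize - 1) / unit test. A C sketch of that search over hypothetical 8/16/32-bit units (not GCC code):

#include <stdio.h>

int main (void)
{
  int bitpos = 20, bitsize = 9;   /* hypothetical bit-field: bits 20..28 */
  int align = 32, bits_per_word = 32;
  int units[] = { 8, 16, 32 };    /* QImode, HImode, SImode sizes */
  int best = 0;

  for (int i = 0; i < 3; i++)
    {
      int unit = units[i];
      /* Keep the widest unit that contains the whole field and does not
         exceed the word size or the alignment.  */
      if (bitpos / unit == (bitpos + bitsize - 1) / unit
          && unit <= bits_per_word
          && unit <= align)
        best = unit;
    }

  printf ("widest containing unit: %d bits\n", best);  /* 32 here */
  return 0;
}
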
diff --git a/gcc/varasm.c b/gcc/varasm.c
index 5b1d6fb..5c3f7e1 100644
--- a/gcc/varasm.c
+++ b/gcc/varasm.c
@@ -1541,7 +1541,8 @@ const_hash (exp)
& ((1 << HASHBITS) - 1)) % MAX_HASH_TABLE;
for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link))
- hi = (hi * 603 + const_hash (TREE_VALUE (link))) % MAX_HASH_TABLE;
+ if (TREE_VALUE (link))
+ hi = (hi * 603 + const_hash (TREE_VALUE (link))) % MAX_HASH_TABLE;
return hi;
}
@@ -1677,8 +1678,22 @@ compare_constant_1 (exp, p)
}
for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link))
- if ((p = compare_constant_1 (TREE_VALUE (link), p)) == 0)
- return 0;
+ {
+ if (TREE_VALUE (link))
+ {
+ if ((p = compare_constant_1 (TREE_VALUE (link), p)) == 0)
+ return 0;
+ }
+ else
+ {
+ tree zero = 0;
+
+ if (bcmp (&zero, p, sizeof zero))
+ return 0;
+ p += sizeof zero;
+ }
+ }
+
return p;
}
else if (code == ADDR_EXPR)
@@ -1798,7 +1813,17 @@ record_constant_1 (exp)
}
for (link = CONSTRUCTOR_ELTS (exp); link; link = TREE_CHAIN (link))
- record_constant_1 (TREE_VALUE (link));
+ {
+ if (TREE_VALUE (link))
+ record_constant_1 (TREE_VALUE (link));
+ else
+ {
+ tree zero = 0;
+
+ obstack_grow (&permanent_obstack, (char *) &zero, sizeof zero);
+ }
+ }
+
return;
}
else if (code == ADDR_EXPR)
@@ -2520,8 +2545,7 @@ output_constant (exp, size)
/* Allow a constructor with no elements for any data type.
This means to fill the space with zeros. */
- if (TREE_CODE (exp) == CONSTRUCTOR
- && TREE_OPERAND (exp, 1) == 0)
+ if (TREE_CODE (exp) == CONSTRUCTOR && CONSTRUCTOR_ELTS (exp) == 0)
{
assemble_zeros (size);
return;
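
All four varasm.c hunks handle the same new convention: a CONSTRUCTOR element whose TREE_VALUE is null stands for a zero-filled slot, so hashing skips it, recording emits the bytes of a null tree pointer, and comparison checks the recorded bytes against a zeroed tree variable (the bcmp call). A small C sketch of that byte-level check, assuming the usual host where a null pointer is all zero bytes (not GCC code):

#include <stdio.h>
#include <string.h>

int main (void)
{
  void *zero = NULL;                    /* stands in for "tree zero = 0" */
  unsigned char recorded[sizeof zero];

  /* record_constant_1 grows the obstack with the bytes of a null
     pointer for an omitted element ...  */
  memset (recorded, 0, sizeof recorded);

  /* ... and compare_constant_1's bcmp checks the recorded bytes
     against a null tree.  Equal on all-zero-bytes hosts.  */
  printf ("%s\n", memcmp (&zero, recorded, sizeof zero) == 0
          ? "null element matches" : "unusual null representation");
  return 0;
}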