author     Richard Henderson <rth@gcc.gnu.org>    2000-07-28 10:46:18 -0700
committer  Richard Henderson <rth@gcc.gnu.org>    2000-07-28 10:46:18 -0700
commit     4b983fdc50c590dde021fae533d8c3745897fdae (patch)
tree       c6bbf69024ca51b064647b9762f5100f294e8b2b /gcc
parent     777ba8d110978fc69481e5f50ebcca1fd39a8542 (diff)
cse.c (canon_hash): Handle PRE_MODIFY/POST_MODIFY.
* cse.c (canon_hash): Handle PRE_MODIFY/POST_MODIFY.
(cse_insn): Likewise.
(addr_affects_sp_p): Likewise.
* expr.c (move_by_pieces): Likewise.
(clear_by_pieces): Likewise.
* gcse.c (oprs_unchanged_p): Likewise.
* haifa-sched.c (sched_analyze_2): Likewise.
* recog.c (offsettable_address_p): Likewise.
* regclass.c (record_address_regs): Likewise.
* reload.c (find_reusable_reload): Likewise.
(push_reload): Likewise.
(operands_match_p): Likewise.
(decompose): Likewise.
(find_reloads_address_1): Likewise.
(find_inc_amount): Likewise.
* reload1.c (elimination_effects): Likewise.
* resource.c (mark_set_resources): Likewise.
* flow.c (attempt_auto_inc): New function; mostly broken out
of find_auto_inc.
(find_auto_inc): Split into two functions and enhanced to
generate POST_MODIFY.
* rtl.def (PRE_MODIFY, POST_MODIFY): Adjust comment.
* rtl.h (count_all_occurrences): Declare.
(HAVE_{PRE,POST}_MODIFY_{DISP,REG}): Provide default of 0 if not
defined.
* rtlanal.c (count_all_occurrences): New function.
* tm.texi (HAVE_POST_MODIFY_DISP, HAVE_PRE_MODIFY_DISP,
HAVE_POST_MODIFY_REG, HAVE_PRE_MODIFY_REG): Document.

* config/ia64/ia64-protos.h (destination_operand): Declare.
* config/ia64/ia64.c (destination_operand): New function.
(ia64_print_operand): Handle POST_MODIFY.
(rtx_needs_barrier): Likewise.
* config/ia64/ia64.h (HAVE_POST_MODIFY_DISP): Define to 1.
(HAVE_POST_MODIFY_REG): Define to 1.
(MAX_REGS_PER_ADDRESS): Change to 2.
(GO_IF_LEGITIMATE_ADDRESS): Accept POST_MODIFY too.
(LEGITIMATE_ADDRESS_REG): New helper macro.
(LEGITIMATE_ADDRESS_DISP): Likewise.
(PREDICATE_CODES): Add entry for destination_operand.
* config/ia64/ia64.md (all mov patterns): Use destination_operand
predicate for operand 0.

From-SVN: r35321
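As context for the patch below, a minimal sketch (not part of the commit; the
helper name and mode choices are illustrative) of the RTL form that flow.c now
generates, built with the same gen_rtx_* calls attempt_auto_inc itself uses:

    /* Illustrative only.  Build (mem:DI (post_modify:DI base
       (plus:DI base step))), the form emitted when HAVE_POST_MODIFY_REG
       (STEP is a REG) or HAVE_POST_MODIFY_DISP (STEP is a CONST_INT) is
       nonzero.  Note that the PLUS must reuse BASE as its first operand;
       see the updated comment in rtl.def and the checks added to
       reload.c:decompose and find_reloads_address_1.  */
    #include "config.h"
    #include "system.h"
    #include "rtl.h"

    static rtx
    make_post_modify_mem (rtx base, rtx step)
    {
      rtx addr = gen_rtx_POST_MODIFY (Pmode, base,
                                      gen_rtx_PLUS (Pmode, base, step));
      return gen_rtx_MEM (DImode, addr);
    }

On ia64 such an address corresponds to post-modify addressing like
"ld8 r1 = [r2], r3"; the new destination_operand predicate rejects the
REG-displacement form when the MEM is operand 0 of a mov pattern, per the
comment above ia64.c:destination_operand.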
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog                    53
-rw-r--r--  gcc/Makefile.in                   2
-rw-r--r--  gcc/config/ia64/ia64-protos.h     9
-rw-r--r--  gcc/config/ia64/ia64.c           85
-rw-r--r--  gcc/config/ia64/ia64.h           40
-rw-r--r--  gcc/config/ia64/ia64.md          60
-rw-r--r--  gcc/cse.c                         8
-rw-r--r--  gcc/flow.c                      372
-rw-r--r--  gcc/gcse.c                        2
-rw-r--r--  gcc/haifa-sched.c                 8
-rw-r--r--  gcc/recog.c                       3
-rw-r--r--  gcc/regclass.c                   11
-rw-r--r--  gcc/regmove.c                     5
-rw-r--r--  gcc/reload.c                    134
-rw-r--r--  gcc/reload.h                      2
-rw-r--r--  gcc/reload1.c                    90
-rw-r--r--  gcc/resource.c                    7
-rw-r--r--  gcc/rtl.def                      24
-rw-r--r--  gcc/rtl.h                        17
-rw-r--r--  gcc/rtl.texi                      4
-rw-r--r--  gcc/rtlanal.c                    63
-rw-r--r--  gcc/simplify-rtx.c                2
22 files changed, 670 insertions, 331 deletions
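Also as a reading aid (a sketch under our own naming, not code from the patch):
rtl.def below moves the autoincrement codes into a new RTX class 'a', so several
hunks in cse.c, recog.c and reload.c collapse four-way code tests into a single
class check of this shape:

    #include "config.h"
    #include "system.h"
    #include "rtl.h"

    /* Nonzero if ADDR is any side-effect address form: PRE_DEC, PRE_INC,
       POST_DEC, POST_INC, PRE_MODIFY or POST_MODIFY all carry class 'a'
       after this change.  */
    static int
    autoinc_address_p (rtx addr)
    {
      return GET_RTX_CLASS (GET_CODE (addr)) == 'a';
    }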
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index e97104a..af5255c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,52 @@
+2000-07-28 Richard Henderson <rth@cygnus.com>
+
+ * emit-rtl.c (gen_lowpart_common): Add missing 'c' variable.
+
+2000-07-28 Bernd Schmidt <bernds@cygnus.co.uk>
+
+ * cse.c (canon_hash): Handle PRE_MODIFY/POST_MODIFY.
+ (cse_insn): Likewise.
+ (addr_affects_sp_p): Likewise.
+ * expr.c (move_by_pieces): Likewise.
+ (clear_by_pieces): Likewise.
+ * gcse.c (oprs_unchanged_p): Likewise.
+ * haifa-sched.c (sched_analyze_2): Likewise.
+ * recog.c (offsettable_address_p): Likewise.
+ * regclass.c (record_address_regs): Likewise.
+ * reload.c (find_reusable_reload): Likewise.
+ (push_reload): Likewise.
+ (operands_match_p): Likewise.
+ (decompose): Likewise.
+ (find_reloads_address_1): Likewise.
+ (find_inc_amount): Likewise.
+ * reload1.c (elimination_effects): Likewise.
+ * resource.c (mark_set_resources): Likewise.
+ * flow.c (attempt_auto_inc): New function; mostly broken out
+ of find_auto_inc.
+ (find_auto_inc): Split into two functions and enhanced to
+ generate POST_MODIFY.
+ * rtl.def (PRE_MODIFY, POST_MODIFY): Adjust comment.
+ * rtl.h (count_all_occurrences): Declare.
+ (HAVE_{PRE,POST}_MODIFY_{DISP,REG}): Provide default of 0 if not
+ defined.
+ * rtlanal.c (count_all_occurrences): New function.
+ * tm.texi (HAVE_POST_MODIFY_DISP, HAVE_PRE_MODIFY_DISP,
+ HAVE_POST_MODIFY_REG, HAVE_PRE_MODIFY_REG): Document.
+
+ * config/ia64/ia64-protos.h (destination_operand): Declare.
+ * config/ia64/ia64.c (destination_operand): New function.
+ (ia64_print_operand): Handle POST_MODIFY.
+ (rtx_needs_barrier): Likewise.
+ * config/ia64/ia64.h (HAVE_POST_MODIFY_DISP): Define to 1.
+ (HAVE_POST_MODIFY_REG): Define to 1.
+ (MAX_REGS_PER_ADDRESS): Change to 2.
+ (GO_IF_LEGITIMATE_ADDRESS): Accept POST_MODIFY too.
+ (LEGITIMATE_ADDRESS_REG): New helper macro.
+ (LEGITIMATE_ADDRESS_DISP): Likewise.
+ (PREDICATE_CODES): Add entry for destination_operand.
+ * config/ia64/ia64.md (all mov patterns): Use destination_operand
+ predicate for operand 0.
+
2000-07-28 Kaveh R. Ghazi <ghazi@caip.rutgers.edu>
* dwarf2out.c: Indent #error directive.
@@ -311,8 +360,8 @@ Mon Jul 24 02:04:52 2000 Jeffrey A Law (law@cygnus.com)
Sun Jul 23 14:49:12 2000 Jason Eckhardt <jle@cygnus.com>
- * config/i860/i860.md (untyped_call expander): Use GEN_CALL
- instead of gen_call.
+ * config/i860/i860.md (untyped_call expander): Use GEN_CALL
+ instead of gen_call.
Sun Jul 23 11:52:03 2000 George Helffrich (george@gly.bris.ac.uk)
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
index 85e33d6..83213d1 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -1374,7 +1374,7 @@ alias.o : alias.c $(CONFIG_H) system.h $(RTL_H) flags.h hard-reg-set.h \
$(REGS_H) toplev.h output.h $(EXPR_H) insn-flags.h $(GGC_H) function.h \
cselib.h $(TREE_H)
regmove.o : regmove.c $(CONFIG_H) system.h $(RTL_H) insn-config.h \
- $(RECOG_H) output.h reload.h $(REGS_H) hard-reg-set.h flags.h function.h \
+ $(RECOG_H) output.h $(REGS_H) hard-reg-set.h flags.h function.h \
$(EXPR_H) insn-flags.h $(BASIC_BLOCK_H) toplev.h
haifa-sched.o : haifa-sched.c $(CONFIG_H) system.h $(RTL_H) \
$(BASIC_BLOCK_H) $(REGS_H) hard-reg-set.h flags.h insn-config.h function.h \
diff --git a/gcc/config/ia64/ia64-protos.h b/gcc/config/ia64/ia64-protos.h
index 8949c1df..00421f8 100644
--- a/gcc/config/ia64/ia64-protos.h
+++ b/gcc/config/ia64/ia64-protos.h
@@ -49,6 +49,15 @@ extern int reg_or_fp01_operand PARAMS((rtx, enum machine_mode));
extern int normal_comparison_operator PARAMS((rtx, enum machine_mode));
extern int adjusted_comparison_operator PARAMS((rtx, enum machine_mode));
extern int call_multiple_values_operation PARAMS((rtx, enum machine_mode));
+extern int destination_operand PARAMS((rtx, enum machine_mode));
+extern int ia64_rap_fp_offset PARAMS((void));
+extern unsigned int ia64_compute_frame_size PARAMS((int));
+extern void save_restore_insns PARAMS((int));
+extern void ia64_expand_prologue PARAMS((void));
+extern void ia64_expand_epilogue PARAMS((void));
+extern void ia64_function_prologue PARAMS((FILE *, int));
+extern void ia64_funtion_epilogue PARAMS((FILE *, int));
+extern int ia64_direct_return PARAMS((void));
extern int predicate_operator PARAMS((rtx, enum machine_mode));
extern int ia64_move_ok PARAMS((rtx, rtx));
diff --git a/gcc/config/ia64/ia64.c b/gcc/config/ia64/ia64.c
index 470acaa..c8de224 100644
--- a/gcc/config/ia64/ia64.c
+++ b/gcc/config/ia64/ia64.c
@@ -469,6 +469,23 @@ reg_or_fp01_operand (op, mode)
|| register_operand (op, mode));
}
+/* Like nonimmediate_operand, but don't allow MEMs that try to use a
+ POST_MODIFY with a REG as displacement. */
+
+int
+destination_operand (op, mode)
+ rtx op;
+ enum machine_mode mode;
+{
+ if (! nonimmediate_operand (op, mode))
+ return 0;
+ if (GET_CODE (op) == MEM
+ && GET_CODE (XEXP (op, 0)) == POST_MODIFY
+ && GET_CODE (XEXP (XEXP (XEXP (op, 0), 1), 1)) == REG)
+ return 0;
+ return 1;
+}
+
/* Return 1 if this is a comparison operator, which accepts an normal 8-bit
signed immediate operand. */
@@ -1971,28 +1988,47 @@ ia64_print_operand (file, x, code)
case 'P':
{
- int value;
+ HOST_WIDE_INT value;
- if (GET_CODE (XEXP (x, 0)) != POST_INC
- && GET_CODE (XEXP (x, 0)) != POST_DEC)
- return;
+ switch (GET_CODE (XEXP (x, 0)))
+ {
+ default:
+ return;
- fputs (", ", file);
+ case POST_MODIFY:
+ x = XEXP (XEXP (XEXP (x, 0), 1), 1);
+ if (GET_CODE (x) == CONST_INT)
+ value = INTVAL (x);
+ else if (GET_CODE (x) == REG)
+ {
+ fprintf (file, ", %s", reg_names[REGNO (x)]);
+ return;
+ }
+ else
+ abort ();
+ break;
- value = GET_MODE_SIZE (GET_MODE (x));
+ case POST_INC:
+ value = GET_MODE_SIZE (GET_MODE (x));
- /* ??? This is for ldf.fill and stf.spill which use XFmode, but which
- actually need 16 bytes increments. Perhaps we can change them
- to use TFmode instead. Or don't use POST_DEC/POST_INC for them.
- Currently, there are no other uses of XFmode, so hacking it here
- is no problem. */
- if (value == 12)
- value = 16;
+ /* ??? This is for ldf.fill and stf.spill which use XFmode,
+ but which actually need 16 bytes increments. Perhaps we
+ can change them to use TFmode instead. Or don't use
+ POST_DEC/POST_INC for them. */
+ if (value == 12)
+ value = 16;
+ break;
- if (GET_CODE (XEXP (x, 0)) == POST_DEC)
- value = -value;
+ case POST_DEC:
+ value = - GET_MODE_SIZE (GET_MODE (x));
+ if (value == -12)
+ value = -16;
+ break;
+ }
- fprintf (file, "%d", value);
+ putc (',', file);
+ putc (' ', file);
+ fprintf (file, HOST_WIDE_INT_PRINT_DEC, value);
return;
}
@@ -2074,8 +2110,6 @@ ia64_print_operand (file, x, code)
unsigned int regno = REGNO (XEXP (x, 0));
if (GET_CODE (x) == EQ)
regno += 1;
- if (code == 'j')
- regno ^= 1;
fprintf (file, "(%s) ", reg_names [regno]);
}
return;
@@ -2089,6 +2123,8 @@ ia64_print_operand (file, x, code)
{
/* This happens for the spill/restore instructions. */
case POST_INC:
+ case POST_DEC:
+ case POST_MODIFY:
x = XEXP (x, 0);
/* ... fall through ... */
@@ -2099,7 +2135,7 @@ ia64_print_operand (file, x, code)
case MEM:
{
rtx addr = XEXP (x, 0);
- if (GET_CODE (addr) == POST_INC || GET_CODE (addr) == POST_DEC)
+ if (GET_RTX_CLASS (GET_CODE (addr)) == 'a')
addr = XEXP (addr, 0);
fprintf (file, "[%s]", reg_names [REGNO (addr)]);
break;
@@ -2749,6 +2785,17 @@ rtx_needs_barrier (x, flags, pred)
need_barrier |= rws_access_reg (REGNO (XEXP (x, 0)), new_flags, pred);
break;
+ case POST_MODIFY:
+ if (GET_CODE (XEXP (x, 0)) != REG)
+ abort ();
+
+ new_flags.is_write = 0;
+ need_barrier = rws_access_reg (REGNO (XEXP (x, 0)), new_flags, pred);
+ need_barrier |= rtx_needs_barrier (XEXP (x, 1), new_flags, pred);
+ new_flags.is_write = 1;
+ need_barrier |= rws_access_reg (REGNO (XEXP (x, 0)), new_flags, pred);
+ break;
+
/* Handle common unary and binary ops for efficiency. */
case COMPARE: case PLUS: case MINUS: case MULT: case DIV:
case MOD: case UDIV: case UMOD: case AND: case IOR:
diff --git a/gcc/config/ia64/ia64.h b/gcc/config/ia64/ia64.h
index b7f9071..6379ae7 100644
--- a/gcc/config/ia64/ia64.h
+++ b/gcc/config/ia64/ia64.h
@@ -1790,6 +1790,8 @@ do { \
#define HAVE_POST_INCREMENT 1
#define HAVE_POST_DECREMENT 1
+#define HAVE_POST_MODIFY_DISP 1
+#define HAVE_POST_MODIFY_REG 1
/* A C expression that is 1 if the RTX X is a constant which is a valid
address. */
@@ -1798,31 +1800,38 @@ do { \
/* The max number of registers that can appear in a valid memory address. */
-#define MAX_REGS_PER_ADDRESS 1
+#define MAX_REGS_PER_ADDRESS 2
/* A C compound statement with a conditional `goto LABEL;' executed if X (an
RTX) is a legitimate memory address on the target machine for a memory
operand of mode MODE. */
-/* ??? IA64 post increment addressing mode is much more powerful than this. */
+#define LEGITIMATE_ADDRESS_REG(X) \
+ ((GET_CODE (X) == REG && REG_OK_FOR_BASE_P (X)) \
+ || (GET_CODE (X) == SUBREG && GET_CODE (XEXP (X, 0)) == REG \
+ && REG_OK_FOR_BASE_P (XEXP (X, 0))))
+
+#define LEGITIMATE_ADDRESS_DISP(R, X) \
+ (GET_CODE (X) == PLUS \
+ && rtx_equal_p (R, XEXP (X, 0)) \
+ && (GET_CODE (XEXP (X, 1)) == REG \
+ || (GET_CODE (XEXP (X, 1)) == CONST_INT \
+ && INTVAL (XEXP (X, 1)) >= -512 \
+ && INTVAL (XEXP (X, 1)) < 512)))
#define GO_IF_LEGITIMATE_ADDRESS(MODE, X, LABEL) \
do { \
- if (GET_CODE (X) == REG && REG_OK_FOR_BASE_P (X)) \
+ if (LEGITIMATE_ADDRESS_REG (X)) \
goto LABEL; \
- else if (GET_CODE (X) == SUBREG && GET_CODE (XEXP (X, 0)) == REG \
- && REG_OK_FOR_BASE_P (XEXP (X, 0))) \
+ else if ((GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
+ && LEGITIMATE_ADDRESS_REG (XEXP (X, 0)) \
+ && XEXP (X, 0) != arg_pointer_rtx) \
+ goto LABEL; \
+ else if (GET_CODE (X) == POST_MODIFY \
+ && LEGITIMATE_ADDRESS_REG (XEXP (X, 0)) \
+ && XEXP (X, 0) != arg_pointer_rtx \
+ && LEGITIMATE_ADDRESS_DISP (XEXP (X, 0), XEXP (X, 1))) \
goto LABEL; \
- else if (GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
- { \
- if (GET_CODE (XEXP (X, 0)) == REG \
- && REG_OK_FOR_BASE_P (XEXP (X, 0))) \
- goto LABEL; \
- else if (GET_CODE (XEXP (X, 0)) == SUBREG \
- && GET_CODE (XEXP (XEXP (X, 0), 0)) == REG \
- && REG_OK_FOR_BASE_P (XEXP (XEXP (X, 0), 0))) \
- goto LABEL; \
- } \
} while (0)
/* A C expression that is nonzero if X (assumed to be a `reg' RTX) is valid for
@@ -2719,6 +2728,7 @@ do { \
{ "symbolic_operand", {SYMBOL_REF, CONST, LABEL_REF}}, \
{ "function_operand", {SYMBOL_REF}}, \
{ "setjmp_operand", {SYMBOL_REF}}, \
+{ "destination_operand", {SUBREG, REG, MEM}}, \
{ "move_operand", {SUBREG, REG, MEM, CONST_INT, CONST_DOUBLE, \
CONSTANT_P_RTX, SYMBOL_REF, CONST, LABEL_REF}}, \
{ "reg_or_0_operand", {SUBREG, REG, CONST_INT}}, \
diff --git a/gcc/config/ia64/ia64.md b/gcc/config/ia64/ia64.md
index d5e8b5e..872ef6b 100644
--- a/gcc/config/ia64/ia64.md
+++ b/gcc/config/ia64/ia64.md
@@ -201,8 +201,8 @@
(set_attr "predicable" "no")])
(define_insn "*movqi_internal_astep"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r, m, r,*f,*f")
- (match_operand:QI 1 "move_operand" "rO,J,m,rO,*f,rO,*f"))]
+ [(set (match_operand:QI 0 "destination_operand" "=r,r,r, m, r,*f,*f")
+ (match_operand:QI 1 "move_operand" "rO,J,m,rO,*f,rO,*f"))]
"TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %r1
@@ -216,8 +216,8 @@
(set_attr "predicable" "no")])
(define_insn "*movqi_internal"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r, m, r,*f,*f")
- (match_operand:QI 1 "move_operand" "rO,J,m,rO,*f,rO,*f"))]
+ [(set (match_operand:QI 0 "destination_operand" "=r,r,r, m, r,*f,*f")
+ (match_operand:QI 1 "move_operand" "rO,J,m,rO,*f,rO,*f"))]
"! TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %r1
@@ -259,8 +259,8 @@
(set_attr "predicable" "no")])
(define_insn "*movhi_internal_astep"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r, m, r,*f,*f")
- (match_operand:HI 1 "move_operand" "rO,J,m,rO,*f,rO,*f"))]
+ [(set (match_operand:HI 0 "destination_operand" "=r,r,r, m, r,*f,*f")
+ (match_operand:HI 1 "move_operand" "rO,J,m,rO,*f,rO,*f"))]
"TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %r1
@@ -274,8 +274,8 @@
(set_attr "predicable" "no")])
(define_insn "*movhi_internal"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r, m, r,*f,*f")
- (match_operand:HI 1 "move_operand" "rO,J,m,rO,*f,rO,*f"))]
+ [(set (match_operand:HI 0 "destination_operand" "=r,r,r, m, r,*f,*f")
+ (match_operand:HI 1 "move_operand" "rO,J,m,rO,*f,rO,*f"))]
"! TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %r1
@@ -318,8 +318,8 @@
(set_attr "predicable" "no")])
(define_insn "*movsi_internal_astep"
- [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,r, m, r,*f,*f")
- (match_operand:SI 1 "move_operand" "rO,J,i,m,rO,*f,rO,*f"))]
+ [(set (match_operand:SI 0 "destination_operand" "=r,r,r,r, m, r,*f,*f")
+ (match_operand:SI 1 "move_operand" "rO,J,i,m,rO,*f,rO,*f"))]
"TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %r1
@@ -334,8 +334,8 @@
(set_attr "predicable" "no")])
(define_insn "*movsi_internal"
- [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,r, m, r,*f,*f")
- (match_operand:SI 1 "move_operand" "rO,J,i,m,rO,*f,rO,*f"))]
+ [(set (match_operand:SI 0 "destination_operand" "=r,r,r,r, m, r,*f,*f")
+ (match_operand:SI 1 "move_operand" "rO,J,i,m,rO,*f,rO,*f"))]
"! TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %r1
@@ -407,8 +407,10 @@
(set_attr "predicable" "no")])
(define_insn "*movdi_internal_astep"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=r,r,r,r, m,r,*f,*f,*f,Q, r,*b")
- (match_operand:DI 1 "move_operand" "rO,J,i,m,rO,*f,rO,*f,Q,*f,*b,rO"))]
+ [(set (match_operand:DI 0 "destination_operand"
+ "=r,r,r,r, m, r,*f,*f,*f, Q, r,*b")
+ (match_operand:DI 1 "move_operand"
+ "rO,J,i,m,rO,*f,rO,*f, Q,*f,*b,rO"))]
"TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"*
{
@@ -437,8 +439,10 @@
(set_attr "predicable" "no")])
(define_insn "*movdi_internal"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=r,r,r,r, m,r,*f,*f,*f,Q, r,*b")
- (match_operand:DI 1 "move_operand" "rO,J,i,m,rO,*f,rO,*f,Q,*f,*b,rO"))]
+ [(set (match_operand:DI 0 "destination_operand"
+ "=r,r,r,r, m, r,*f,*f,*f, Q, r,*b")
+ (match_operand:DI 1 "move_operand"
+ "rO,J,i,m,rO,*f,rO,*f, Q,*f,*b,rO"))]
"! TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"*
{
@@ -573,8 +577,8 @@
(set_attr "predicable" "no")])
(define_insn "*movsf_internal_astep"
- [(set (match_operand:SF 0 "nonimmediate_operand" "=f,f, Q,*r, f,*r,*r, m")
- (match_operand:SF 1 "general_operand" "fG,Q,fG,fG,*r,*r, m,*r"))]
+ [(set (match_operand:SF 0 "destination_operand" "=f,f, Q,*r, f,*r,*r, m")
+ (match_operand:SF 1 "general_operand" "fG,Q,fG,fG,*r,*r, m,*r"))]
"TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %F1
@@ -589,8 +593,8 @@
(set_attr "predicable" "no")])
(define_insn "*movsf_internal"
- [(set (match_operand:SF 0 "nonimmediate_operand" "=f,f, Q,*r, f,*r,*r, m")
- (match_operand:SF 1 "general_operand" "fG,Q,fG,fG,*r,*r, m,*r"))]
+ [(set (match_operand:SF 0 "destination_operand" "=f,f, Q,*r, f,*r,*r, m")
+ (match_operand:SF 1 "general_operand" "fG,Q,fG,fG,*r,*r, m,*r"))]
"! TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %F1
@@ -632,8 +636,8 @@
(set_attr "predicable" "no")])
(define_insn "*movdf_internal_astep"
- [(set (match_operand:DF 0 "nonimmediate_operand" "=f,f, Q,*r, f,*r,*r, m")
- (match_operand:DF 1 "general_operand" "fG,Q,fG,fG,*r,*r, m,*r"))]
+ [(set (match_operand:DF 0 "destination_operand" "=f,f, Q,*r, f,*r,*r, m")
+ (match_operand:DF 1 "general_operand" "fG,Q,fG,fG,*r,*r, m,*r"))]
"TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %F1
@@ -648,8 +652,8 @@
(set_attr "predicable" "no")])
(define_insn "*movdf_internal"
- [(set (match_operand:DF 0 "nonimmediate_operand" "=f,f, Q,*r, f,*r,*r, m")
- (match_operand:DF 1 "general_operand" "fG,Q,fG,fG,*r,*r, m,*r"))]
+ [(set (match_operand:DF 0 "destination_operand" "=f,f, Q,*r, f,*r,*r, m")
+ (match_operand:DF 1 "general_operand" "fG,Q,fG,fG,*r,*r, m,*r"))]
"! TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %F1
@@ -689,8 +693,8 @@
(set_attr "predicable" "no")])
(define_insn "*movxf_internal_astep"
- [(set (match_operand:XF 0 "nonimmediate_operand" "=f,f, m")
- (match_operand:XF 1 "general_operand" "fG,m,fG"))]
+ [(set (match_operand:XF 0 "destination_operand" "=f,f, m")
+ (match_operand:XF 1 "general_operand" "fG,m,fG"))]
"TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %F1
@@ -700,8 +704,8 @@
(set_attr "predicable" "no")])
(define_insn "*movxf_internal"
- [(set (match_operand:XF 0 "nonimmediate_operand" "=f,f, m")
- (match_operand:XF 1 "general_operand" "fG,m,fG"))]
+ [(set (match_operand:XF 0 "destination_operand" "=f,f, m")
+ (match_operand:XF 1 "general_operand" "fG,m,fG"))]
"! TARGET_A_STEP && ia64_move_ok (operands[0], operands[1])"
"@
mov %0 = %F1
diff --git a/gcc/cse.c b/gcc/cse.c
index 15210d8..69ef933 100644
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -2271,6 +2271,8 @@ canon_hash (x, mode)
case PRE_INC:
case POST_DEC:
case POST_INC:
+ case PRE_MODIFY:
+ case POST_MODIFY:
case PC:
case CC0:
case CALL:
@@ -5472,8 +5474,7 @@ cse_insn (insn, libcall_insn)
#ifdef PUSH_ROUNDING
/* Stack pushes invalidate the stack pointer. */
rtx addr = XEXP (dest, 0);
- if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
- || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
+ if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
&& XEXP (addr, 0) == stack_pointer_rtx)
invalidate (stack_pointer_rtx, Pmode);
#endif
@@ -6085,8 +6086,7 @@ static int
addr_affects_sp_p (addr)
register rtx addr;
{
- if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
- || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
+ if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
&& GET_CODE (XEXP (addr, 0)) == REG
&& REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
{
diff --git a/gcc/flow.c b/gcc/flow.c
index a8f6061..7749d89 100644
--- a/gcc/flow.c
+++ b/gcc/flow.c
@@ -372,6 +372,8 @@ static rtx not_reg_cond PARAMS ((rtx));
static rtx nand_reg_cond PARAMS ((rtx, rtx));
#endif
#ifdef AUTO_INC_DEC
+static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
+ rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc PARAMS ((struct propagate_block_info *,
rtx, rtx));
static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
@@ -4800,6 +4802,156 @@ nand_reg_cond (old, x)
#ifdef AUTO_INC_DEC
+/* Try to substitute the auto-inc expression INC as the address inside
+ MEM which occurs in INSN. Currently, the address of MEM is an expression
+ involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
+ that has a single set whose source is a PLUS of INCR_REG and something
+ else. */
+
+static void
+attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
+ struct propagate_block_info *pbi;
+ rtx inc, insn, mem, incr, incr_reg;
+{
+ int regno = REGNO (incr_reg);
+ rtx set = single_set (incr);
+ rtx q = SET_DEST (set);
+ rtx y = SET_SRC (set);
+ int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
+
+ /* Make sure this reg appears only once in this insn. */
+ if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
+ return;
+
+ if (dead_or_set_p (incr, incr_reg)
+ /* Mustn't autoinc an eliminable register. */
+ && (regno >= FIRST_PSEUDO_REGISTER
+ || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
+ {
+ /* This is the simple case. Try to make the auto-inc. If
+ we can't, we are done. Otherwise, we will do any
+ needed updates below. */
+ if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
+ return;
+ }
+ else if (GET_CODE (q) == REG
+ /* PREV_INSN used here to check the semi-open interval
+ [insn,incr). */
+ && ! reg_used_between_p (q, PREV_INSN (insn), incr)
+ /* We must also check for sets of q as q may be
+ a call clobbered hard register and there may
+ be a call between PREV_INSN (insn) and incr. */
+ && ! reg_set_between_p (q, PREV_INSN (insn), incr))
+ {
+ /* We have *p followed sometime later by q = p+size.
+ Both p and q must be live afterward,
+ and q is not used between INSN and its assignment.
+ Change it to q = p, ...*q..., q = q+size.
+ Then fall into the usual case. */
+ rtx insns, temp;
+ basic_block bb;
+
+ start_sequence ();
+ emit_move_insn (q, incr_reg);
+ insns = get_insns ();
+ end_sequence ();
+
+ if (basic_block_for_insn)
+ for (temp = insns; temp; temp = NEXT_INSN (temp))
+ set_block_for_insn (temp, pbi->bb);
+
+ /* If we can't make the auto-inc, or can't make the
+ replacement into Y, exit. There's no point in making
+ the change below if we can't do the auto-inc and doing
+ so is not correct in the pre-inc case. */
+
+ XEXP (inc, 0) = q;
+ validate_change (insn, &XEXP (mem, 0), inc, 1);
+ validate_change (incr, &XEXP (y, opnum), q, 1);
+ if (! apply_change_group ())
+ return;
+
+ /* We now know we'll be doing this change, so emit the
+ new insn(s) and do the updates. */
+ emit_insns_before (insns, insn);
+
+ if (pbi->bb->head == insn)
+ pbi->bb->head = insns;
+
+ /* INCR will become a NOTE and INSN won't contain a
+ use of INCR_REG. If a use of INCR_REG was just placed in
+ the insn before INSN, make that the next use.
+ Otherwise, invalidate it. */
+ if (GET_CODE (PREV_INSN (insn)) == INSN
+ && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
+ && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
+ pbi->reg_next_use[regno] = PREV_INSN (insn);
+ else
+ pbi->reg_next_use[regno] = 0;
+
+ incr_reg = q;
+ regno = REGNO (q);
+
+ /* REGNO is now used in INCR which is below INSN, but
+ it previously wasn't live here. If we don't mark
+ it as live, we'll put a REG_DEAD note for it
+ on this insn, which is incorrect. */
+ SET_REGNO_REG_SET (pbi->reg_live, regno);
+
+ /* If there are any calls between INSN and INCR, show
+ that REGNO now crosses them. */
+ for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
+ if (GET_CODE (temp) == CALL_INSN)
+ REG_N_CALLS_CROSSED (regno)++;
+ }
+ else
+ return;
+
+ /* If we haven't returned, it means we were able to make the
+ auto-inc, so update the status. First, record that this insn
+ has an implicit side effect. */
+
+ REG_NOTES (insn)
+ = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
+
+ /* Modify the old increment-insn to simply copy
+ the already-incremented value of our register. */
+ if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
+ abort ();
+
+ /* If that makes it a no-op (copying the register into itself) delete
+ it so it won't appear to be a "use" and a "set" of this
+ register. */
+ if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
+ {
+ /* If the original source was dead, it's dead now. */
+ rtx note;
+
+ while (note = find_reg_note (incr, REG_DEAD, NULL_RTX))
+ {
+ remove_note (incr, note);
+ if (XEXP (note, 0) != incr_reg)
+ CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
+ }
+
+ PUT_CODE (incr, NOTE);
+ NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
+ NOTE_SOURCE_FILE (incr) = 0;
+ }
+
+ if (regno >= FIRST_PSEUDO_REGISTER)
+ {
+ /* Count an extra reference to the reg. When a reg is
+ incremented, spilling it is worse, so we want to make
+ that less likely. */
+ REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
+
+ /* Count the increment as a setting of the register,
+ even though it isn't a SET in rtl. */
+ REG_N_SETS (regno)++;
+ }
+}
+
/* X is a MEM found in INSN. See if we can convert it into an auto-increment
reference. */
@@ -4811,7 +4963,12 @@ find_auto_inc (pbi, x, insn)
{
rtx addr = XEXP (x, 0);
HOST_WIDE_INT offset = 0;
- rtx set;
+ rtx set, y, incr, inc_val;
+ int regno;
+ int size = GET_MODE_SIZE (GET_MODE (x));
+
+ if (GET_CODE (insn) == JUMP_INSN)
+ return;
/* Here we detect use of an index register which might be good for
postincrement, postdecrement, preincrement, or predecrement. */
@@ -4819,170 +4976,69 @@ find_auto_inc (pbi, x, insn)
if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
- if (GET_CODE (addr) == REG)
- {
- register rtx y;
- register int size = GET_MODE_SIZE (GET_MODE (x));
- rtx use;
- rtx incr;
- int regno = REGNO (addr);
-
- /* Is the next use an increment that might make auto-increment? */
- if ((incr = pbi->reg_next_use[regno]) != 0
- && (set = single_set (incr)) != 0
- && GET_CODE (set) == SET
- && BLOCK_NUM (incr) == BLOCK_NUM (insn)
- /* Can't add side effects to jumps; if reg is spilled and
- reloaded, there's no way to store back the altered value. */
- && GET_CODE (insn) != JUMP_INSN
- && (y = SET_SRC (set), GET_CODE (y) == PLUS)
- && XEXP (y, 0) == addr
- && GET_CODE (XEXP (y, 1)) == CONST_INT
- && ((HAVE_POST_INCREMENT
- && (INTVAL (XEXP (y, 1)) == size && offset == 0))
- || (HAVE_POST_DECREMENT
- && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
- || (HAVE_PRE_INCREMENT
- && (INTVAL (XEXP (y, 1)) == size && offset == size))
- || (HAVE_PRE_DECREMENT
- && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
- /* Make sure this reg appears only once in this insn. */
- && (use = find_use_as_address (PATTERN (insn), addr, offset),
- use != 0 && use != (rtx) 1))
- {
- rtx q = SET_DEST (set);
- enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
- ? (offset ? PRE_INC : POST_INC)
- : (offset ? PRE_DEC : POST_DEC));
-
- if (dead_or_set_p (incr, addr)
- /* Mustn't autoinc an eliminable register. */
- && (regno >= FIRST_PSEUDO_REGISTER
- || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
- {
- /* This is the simple case. Try to make the auto-inc. If
- we can't, we are done. Otherwise, we will do any
- needed updates below. */
- if (! validate_change (insn, &XEXP (x, 0),
- gen_rtx_fmt_e (inc_code, Pmode, addr),
- 0))
- return;
- }
- else if (GET_CODE (q) == REG
- /* PREV_INSN used here to check the semi-open interval
- [insn,incr). */
- && ! reg_used_between_p (q, PREV_INSN (insn), incr)
- /* We must also check for sets of q as q may be
- a call clobbered hard register and there may
- be a call between PREV_INSN (insn) and incr. */
- && ! reg_set_between_p (q, PREV_INSN (insn), incr))
- {
- /* We have *p followed sometime later by q = p+size.
- Both p and q must be live afterward,
- and q is not used between INSN and its assignment.
- Change it to q = p, ...*q..., q = q+size.
- Then fall into the usual case. */
- rtx insns, temp;
-
- start_sequence ();
- emit_move_insn (q, addr);
- insns = get_insns ();
- end_sequence ();
-
- if (basic_block_for_insn)
- for (temp = insns; temp; temp = NEXT_INSN (temp))
- set_block_for_insn (temp, pbi->bb);
-
- /* If we can't make the auto-inc, or can't make the
- replacement into Y, exit. There's no point in making
- the change below if we can't do the auto-inc and doing
- so is not correct in the pre-inc case. */
-
- validate_change (insn, &XEXP (x, 0),
- gen_rtx_fmt_e (inc_code, Pmode, q),
- 1);
- validate_change (incr, &XEXP (y, 0), q, 1);
- if (! apply_change_group ())
- return;
-
- /* We now know we'll be doing this change, so emit the
- new insn(s) and do the updates. */
- emit_insns_before (insns, insn);
-
- if (pbi->bb->head == insn)
- pbi->bb->head = insns;
-
- /* INCR will become a NOTE and INSN won't contain a
- use of ADDR. If a use of ADDR was just placed in
- the insn before INSN, make that the next use.
- Otherwise, invalidate it. */
- if (GET_CODE (PREV_INSN (insn)) == INSN
- && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
- && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
- pbi->reg_next_use[regno] = PREV_INSN (insn);
- else
- pbi->reg_next_use[regno] = 0;
-
- addr = q;
- regno = REGNO (q);
-
- /* REGNO is now used in INCR which is below INSN, but it
- previously wasn't live here. If we don't mark it as
- live, we'll put a REG_DEAD note for it on this insn,
- which is incorrect. */
- SET_REGNO_REG_SET (pbi->reg_live, regno);
-
- /* If there are any calls between INSN and INCR, show
- that REGNO now crosses them. */
- for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
- if (GET_CODE (temp) == CALL_INSN)
- REG_N_CALLS_CROSSED (regno)++;
- }
- else
- return;
-
- /* If we haven't returned, it means we were able to make the
- auto-inc, so update the status. First, record that this insn
- has an implicit side effect. */
+ if (GET_CODE (addr) != REG)
+ return;
- REG_NOTES (insn)
- = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
+ regno = REGNO (addr);
- /* Modify the old increment-insn to simply copy
- the already-incremented value of our register. */
- if (! validate_change (incr, &SET_SRC (set), addr, 0))
- abort ();
+ /* Is the next use an increment that might make auto-increment? */
+ incr = pbi->reg_next_use[regno];
+ if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
+ return;
+ set = single_set (incr);
+ if (set == 0 || GET_CODE (set) != SET)
+ return;
+ y = SET_SRC (set);
- /* If that makes it a no-op (copying the register into itself) delete
- it so it won't appear to be a "use" and a "set" of this
- register. */
- if (SET_DEST (set) == addr)
- {
- /* If the original source was dead, it's dead now. */
- rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
- if (note && XEXP (note, 0) != addr)
- CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
-
- PUT_CODE (incr, NOTE);
- NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (incr) = 0;
- }
+ if (GET_CODE (y) != PLUS)
+ return;
- if (regno >= FIRST_PSEUDO_REGISTER)
- {
- /* Count an extra reference to the reg. When a reg is
- incremented, spilling it is worse, so we want to make
- that less likely. */
- REG_N_REFS (regno) += (optimize_size ? 1
- : pbi->bb->loop_depth + 1);
+ if (REGNO (XEXP (y, 0)) == REGNO (addr))
+ inc_val = XEXP (y, 1);
+ else if (REGNO (XEXP (y, 1)) == REGNO (addr))
+ inc_val = XEXP (y, 0);
+ else
+ abort ();
- /* Count the increment as a setting of the register,
- even though it isn't a SET in rtl. */
- REG_N_SETS (regno)++;
- }
- }
+ if (GET_CODE (inc_val) == CONST_INT)
+ {
+ if (HAVE_POST_INCREMENT
+ && (INTVAL (inc_val) == size && offset == 0))
+ attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
+ incr, addr);
+ else if (HAVE_POST_DECREMENT
+ && (INTVAL (inc_val) == - size && offset == 0))
+ attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
+ incr, addr);
+ else if (HAVE_PRE_INCREMENT
+ && (INTVAL (inc_val) == size && offset == size))
+ attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
+ incr, addr);
+ else if (HAVE_PRE_DECREMENT
+ && (INTVAL (inc_val) == - size && offset == - size))
+ attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
+ incr, addr);
+ else if (HAVE_POST_MODIFY_DISP && offset == 0)
+ attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
+ gen_rtx_PLUS (Pmode,
+ addr,
+ inc_val)),
+ insn, x, incr, addr);
+ }
+ else if (GET_CODE (inc_val) == REG
+ && ! reg_set_between_p (inc_val, PREV_INSN (insn),
+ NEXT_INSN (incr)))
+
+ {
+ if (HAVE_POST_MODIFY_REG && offset == 0)
+ attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
+ gen_rtx_PLUS (Pmode,
+ addr,
+ inc_val)),
+ insn, x, incr, addr);
}
}
+
#endif /* AUTO_INC_DEC */
static void
diff --git a/gcc/gcse.c b/gcc/gcse.c
index 6275c09..cec2c88 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -1250,6 +1250,8 @@ oprs_unchanged_p (x, insn, avail_p)
case PRE_INC:
case POST_DEC:
case POST_INC:
+ case PRE_MODIFY:
+ case POST_MODIFY:
return 0;
case PC:
diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c
index 63bdce0..8878cca 100644
--- a/gcc/haifa-sched.c
+++ b/gcc/haifa-sched.c
@@ -3625,6 +3625,14 @@ sched_analyze_2 (deps, x, insn)
sched_analyze_1 (deps, x, insn);
return;
+ case POST_MODIFY:
+ case PRE_MODIFY:
+ /* op0 = op0 + op1 */
+ sched_analyze_2 (deps, XEXP (x, 0), insn);
+ sched_analyze_2 (deps, XEXP (x, 1), insn);
+ sched_analyze_1 (deps, x, insn);
+ return;
+
default:
break;
}
diff --git a/gcc/recog.c b/gcc/recog.c
index a8d06bb..db1c913 100644
--- a/gcc/recog.c
+++ b/gcc/recog.c
@@ -1885,8 +1885,7 @@ offsettable_address_p (strictp, mode, y)
return good;
}
- if (ycode == PRE_DEC || ycode == PRE_INC
- || ycode == POST_DEC || ycode == POST_INC)
+ if (GET_RTX_CLASS (ycode) == 'a')
return 0;
/* The offset added here is chosen as the maximum offset that
diff --git a/gcc/regclass.c b/gcc/regclass.c
index eb909d1..4439857 100644
--- a/gcc/regclass.c
+++ b/gcc/regclass.c
@@ -1952,6 +1952,17 @@ record_address_regs (x, class, scale)
}
break;
+ /* Double the importance of a pseudo register that is incremented
+ or decremented, since it would take two extra insns
+ if it ends up in the wrong place. */
+ case POST_MODIFY:
+ case PRE_MODIFY:
+ record_address_regs (XEXP (x, 0), BASE_REG_CLASS, 2 * scale);
+ if (REG_P (XEXP (XEXP (x, 1), 1)))
+ record_address_regs (XEXP (XEXP (x, 1), 1),
+ INDEX_REG_CLASS, 2 * scale);
+ break;
+
case POST_INC:
case PRE_INC:
case POST_DEC:
diff --git a/gcc/regmove.c b/gcc/regmove.c
index 44d63e2..69d7f88 100644
--- a/gcc/regmove.c
+++ b/gcc/regmove.c
@@ -32,7 +32,6 @@ Boston, MA 02111-1307, USA. */
#include "insn-config.h"
#include "recog.h"
#include "output.h"
-#include "reload.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
@@ -1185,7 +1184,7 @@ regmove_optimize (f, nregs, regmove_dump_file)
continue;
if (match.early_clobber[match_no]
- && count_occurrences (PATTERN (insn), src) > 1)
+ && count_occurrences (PATTERN (insn), src, 0) > 1)
continue;
/* Make sure match_operand is the destination. */
@@ -1289,7 +1288,7 @@ regmove_optimize (f, nregs, regmove_dump_file)
continue;
if (match.early_clobber[match_no]
- && count_occurrences (PATTERN (insn), src) > 1)
+ && count_occurrences (PATTERN (insn), src, 0) > 1)
continue;
/* Make sure match_no is the destination. */
diff --git a/gcc/reload.c b/gcc/reload.c
index d519708..01286ba 100644
--- a/gcc/reload.c
+++ b/gcc/reload.c
@@ -748,18 +748,11 @@ find_reusable_reload (p_in, out, class, type, opnum, dont_share)
true_regnum (rld[i].reg_rtx)))
&& out == 0 && rld[i].out == 0 && rld[i].in != 0
&& ((GET_CODE (in) == REG
- && (GET_CODE (rld[i].in) == POST_INC
- || GET_CODE (rld[i].in) == POST_DEC
- || GET_CODE (rld[i].in) == PRE_INC
- || GET_CODE (rld[i].in) == PRE_DEC)
+ && GET_RTX_CLASS (GET_CODE (rld[i].in)) == 'a'
&& MATCHES (XEXP (rld[i].in, 0), in))
- ||
- (GET_CODE (rld[i].in) == REG
- && (GET_CODE (in) == POST_INC
- || GET_CODE (in) == POST_DEC
- || GET_CODE (in) == PRE_INC
- || GET_CODE (in) == PRE_DEC)
- && MATCHES (XEXP (in, 0), rld[i].in)))
+ || (GET_CODE (rld[i].in) == REG
+ && GET_RTX_CLASS (GET_CODE (in)) == 'a'
+ && MATCHES (XEXP (in, 0), rld[i].in)))
&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
&& (reg_class_size[(int) class] == 1 || SMALL_REGISTER_CLASSES)
&& MERGABLE_RELOADS (type, rld[i].when_needed,
@@ -865,7 +858,8 @@ push_reload (in, out, inloc, outloc, class,
if (in != 0 && out != 0 && GET_CODE (in) == MEM && rtx_equal_p (in, out))
{
if (GET_CODE (XEXP (in, 0)) == POST_INC
- || GET_CODE (XEXP (in, 0)) == POST_DEC)
+ || GET_CODE (XEXP (in, 0)) == POST_DEC
+ || GET_CODE (XEXP (in, 0)) == POST_MODIFY)
{
rtx new = gen_rtx_MEM (GET_MODE (in), XEXP (XEXP (in, 0), 0));
@@ -873,7 +867,8 @@ push_reload (in, out, inloc, outloc, class,
in = new;
}
if (GET_CODE (XEXP (in, 0)) == PRE_INC
- || GET_CODE (XEXP (in, 0)) == PRE_DEC)
+ || GET_CODE (XEXP (in, 0)) == PRE_DEC
+ || GET_CODE (XEXP (in, 0)) == PRE_MODIFY)
{
rtx new = gen_rtx_MEM (GET_MODE (out), XEXP (XEXP (out, 0), 0));
@@ -2052,7 +2047,7 @@ operands_match_p (x, y)
because the assembler insn would increment only once.
On the other hand, an postincrement matches ordinary indexing
if the postincrement is the output operand. */
- if (code == POST_DEC || code == POST_INC)
+ if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
return operands_match_p (XEXP (x, 0), y);
/* Two preincrements are invalid
because the assembler insn would increment only once.
@@ -2060,7 +2055,8 @@ operands_match_p (x, y)
if the preincrement is the input operand.
In this case, return 2, since some callers need to do special
things when this happens. */
- if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC)
+ if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
+ || GET_CODE (y) == PRE_MODIFY)
return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
slow:
@@ -2168,6 +2164,20 @@ decompose (x)
return val;
}
+ if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
+ {
+ if (GET_CODE (XEXP (addr, 1)) == PLUS
+ && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
+ && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
+ {
+ val.base = XEXP (addr, 0);
+ val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
+ val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
+ val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
+ return val;
+ }
+ }
+
if (GET_CODE (addr) == CONST)
{
addr = XEXP (addr, 0);
@@ -5095,6 +5105,86 @@ find_reloads_address_1 (mode, x, context, loc, opnum, type, ind_levels, insn)
return 0;
+ case POST_MODIFY:
+ case PRE_MODIFY:
+ {
+ rtx op0 = XEXP (x, 0);
+ rtx op1 = XEXP (x, 1);
+
+ if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
+ return 0;
+
+ /* Currently, we only support {PRE,POST}_MODIFY constructs
+ where a base register is {inc,dec}remented by the contents
+ of another register or by a constant value. Thus, these
+ operands must match. */
+ if (op0 != XEXP (op1, 0))
+ abort();
+
+ /* Require index register (or constant). Let's just handle the
+ register case in the meantime... If the target allows
+ auto-modify by a constant then we could try replacing a pseudo
+ register with its equivalent constant where applicable. */
+ if (REG_P (XEXP (op1, 1)))
+ if (!REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
+ find_reloads_address_1 (mode, XEXP (op1, 1), 1, &XEXP (op1, 1),
+ opnum, type, ind_levels, insn);
+
+ if (REG_P (XEXP (op1, 0)))
+ {
+ register int regno = REGNO (XEXP (op1, 0));
+
+ /* A register that is incremented cannot be constant! */
+ if (regno >= FIRST_PSEUDO_REGISTER
+ && reg_equiv_constant[regno] != 0)
+ abort ();
+
+ /* Handle a register that is equivalent to a memory location
+ which cannot be addressed directly. */
+ if (reg_equiv_memory_loc[regno] != 0
+ && (reg_equiv_address[regno] != 0
+ || num_not_at_initial_offset))
+ {
+ rtx tem = make_memloc (XEXP (x, 0), regno);
+
+ if (reg_equiv_address[regno]
+ || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
+ {
+ /* First reload the memory location's address.
+ We can't use ADDR_TYPE (type) here, because we need to
+ write back the value after reading it, hence we actually
+ need two registers. */
+ find_reloads_address (GET_MODE (tem), 0, XEXP (tem, 0),
+ &XEXP (tem, 0), opnum, type,
+ ind_levels, insn);
+
+ /* Then reload the memory location into a base
+ register. */
+ push_reload (tem, tem, &XEXP (x, 0), &XEXP (op1, 0),
+ BASE_REG_CLASS, GET_MODE (x), GET_MODE (x),
+ 0, 0, opnum, RELOAD_OTHER);
+ break;
+ }
+ }
+
+ if (reg_renumber[regno] >= 0)
+ regno = reg_renumber[regno];
+
+ /* We require a base register here... */
+ if (!REGNO_MODE_OK_FOR_BASE_P (regno, GET_MODE (x)))
+ {
+ push_reload (XEXP (op1, 0), XEXP (x, 0),
+ &XEXP (op1, 0), &XEXP (x, 0),
+ BASE_REG_CLASS,
+ GET_MODE (x), GET_MODE (x), 0, 0,
+ opnum, RELOAD_OTHER);
+ }
+ }
+ else
+ abort();
+ }
+ return 0;
+
case POST_INC:
case POST_DEC:
case PRE_INC:
@@ -5988,6 +6078,8 @@ find_equiv_reg (goal, insn, class, other, reload_reg_p, goalreg, mode)
case PRE_INC:
case POST_DEC:
case PRE_DEC:
+ case POST_MODIFY:
+ case PRE_MODIFY:
return 0;
default:
break;
@@ -6407,7 +6499,17 @@ find_inc_amount (x, inced)
|| GET_CODE (addr) == POST_INC)
&& XEXP (addr, 0) == inced)
return GET_MODE_SIZE (GET_MODE (x));
- }
+ else if ((GET_CODE (addr) == PRE_MODIFY
+ || GET_CODE (addr) == POST_MODIFY)
+ && GET_CODE (XEXP (addr, 1)) == PLUS
+ && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
+ && XEXP (addr, 0) == inced
+ && GET_CODE (XEXP (XEXP (addr, 1), 1)) == CONST_INT)
+ {
+ i = INTVAL (XEXP (XEXP (addr, 1), 1));
+ return i < 0 ? -i : i;
+ }
+ }
fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
diff --git a/gcc/reload.h b/gcc/reload.h
index 34b93cf..25bc37c 100644
--- a/gcc/reload.h
+++ b/gcc/reload.h
@@ -344,8 +344,6 @@ int earlyclobber_operand_p PARAMS ((rtx));
extern int reloads_conflict PARAMS ((int, int));
-int count_occurrences PARAMS ((rtx, rtx));
-
/* Initialize the reload pass once per compilation. */
extern void init_reload PARAMS ((void));
diff --git a/gcc/reload1.c b/gcc/reload1.c
index 4949b89..35fbc9e 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -2633,6 +2633,8 @@ elimination_effects (x, mem_mode)
case POST_INC:
case PRE_DEC:
case POST_DEC:
+ case POST_MODIFY:
+ case PRE_MODIFY:
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
if (ep->to_rtx == XEXP (x, 0))
{
@@ -2645,10 +2647,19 @@ elimination_effects (x, mem_mode)
#endif
if (code == PRE_DEC || code == POST_DEC)
ep->offset += size;
- else
+ else if (code == PRE_INC || code == POST_INC)
ep->offset -= size;
+ else if ((code == PRE_MODIFY || code == POST_MODIFY)
+ && GET_CODE (XEXP (x, 1)) == PLUS
+ && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
+ && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
+ ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
}
+ /* These two aren't unary operators. */
+ if (code == POST_MODIFY || code == PRE_MODIFY)
+ break;
+
/* Fall through to generic unary operation case. */
case STRICT_LOW_PART:
case NEG: case NOT:
@@ -3884,7 +3895,7 @@ reload_as_needed (live_known)
use PATTERN (p) as argument to reg_set_p . */
if (reg_set_p (reload_reg, PATTERN (p)))
break;
- n = count_occurrences (PATTERN (p), reload_reg);
+ n = count_occurrences (PATTERN (p), reload_reg, 0);
if (! n)
continue;
if (n == 1)
@@ -6190,7 +6201,7 @@ emit_input_reload_insns (chain, rl, old, j)
reloadreg)
/* This is unsafe if operand occurs more than once in current
insn. Perhaps some occurrences aren't reloaded. */
- && count_occurrences (PATTERN (insn), old) == 1
+ && count_occurrences (PATTERN (insn), old, 0) == 1
/* Don't risk splitting a matching pair of operands. */
&& ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
{
@@ -6653,7 +6664,7 @@ do_input_reload (chain, rl, j)
&& TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
{
expect_occurrences
- = count_occurrences (PATTERN (insn), rl->in) == 1 ? 0 : -1;
+ = count_occurrences (PATTERN (insn), rl->in, 0) == 1 ? 0 : -1;
rl->in
= regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
}
@@ -7415,9 +7426,9 @@ delete_output_reload (insn, j, last_reload_reg)
return;
}
}
- n_occurrences = count_occurrences (PATTERN (insn), reg);
+ n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
if (substed)
- n_occurrences += count_occurrences (PATTERN (insn), substed);
+ n_occurrences += count_occurrences (PATTERN (insn), substed, 0);
if (n_occurrences > n_inherited)
return;
@@ -7824,73 +7835,6 @@ constraint_accepts_reg_p (string, reg)
}
}
-/* Return the number of places FIND appears within X, but don't count
- an occurrence if some SET_DEST is FIND. */
-
-int
-count_occurrences (x, find)
- register rtx x, find;
-{
- register int i, j;
- register enum rtx_code code;
- register const char *format_ptr;
- int count;
-
- if (x == find)
- return 1;
- if (x == 0)
- return 0;
-
- code = GET_CODE (x);
-
- switch (code)
- {
- case REG:
- case QUEUED:
- case CONST_INT:
- case CONST_DOUBLE:
- case SYMBOL_REF:
- case CODE_LABEL:
- case PC:
- case CC0:
- return 0;
-
- case MEM:
- if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
- return 1;
- break;
- case SET:
- if (SET_DEST (x) == find)
- return count_occurrences (SET_SRC (x), find);
- break;
-
- default:
- break;
- }
-
- format_ptr = GET_RTX_FORMAT (code);
- count = 0;
-
- for (i = 0; i < GET_RTX_LENGTH (code); i++)
- {
- switch (*format_ptr++)
- {
- case 'e':
- count += count_occurrences (XEXP (x, i), find);
- break;
-
- case 'E':
- if (XVEC (x, i) != NULL)
- {
- for (j = 0; j < XVECLEN (x, i); j++)
- count += count_occurrences (XVECEXP (x, i, j), find);
- }
- break;
- }
- }
- return count;
-}
-
/* INSN is a no-op; delete it.
If this sets the return value of the function, we must keep a USE around,
in case this is in a different basic block than the final USE. Otherwise,
diff --git a/gcc/resource.c b/gcc/resource.c
index ae89d62..606e2e9 100644
--- a/gcc/resource.c
+++ b/gcc/resource.c
@@ -721,6 +721,13 @@ mark_set_resources (x, res, in_dest, mark_type)
mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
return;
+ case PRE_MODIFY:
+ case POST_MODIFY:
+ mark_set_resources (XEXP (x, 0), res, 1, 0);
+ mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, 0);
+ mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, 0);
+ return;
+
case SIGN_EXTRACT:
case ZERO_EXTRACT:
if (! (mark_type == MARK_DEST && in_dest))
diff --git a/gcc/rtl.def b/gcc/rtl.def
index 6a0b73e..fcad7ec 100644
--- a/gcc/rtl.def
+++ b/gcc/rtl.def
@@ -56,6 +56,7 @@ Boston, MA 02111-1307, USA. */
"i" an rtx code for a machine insn (INSN, JUMP_INSN, CALL_INSN)
"m" an rtx code for something that matches in insns (e.g, MATCH_DUP)
"g" an rtx code for grouping insns together (e.g, GROUP_PARALLEL)
+ "a" an rtx code for autoincrement addressing modes (e.g. POST_DEC)
"x" everything else
*/
@@ -756,18 +757,25 @@ DEF_RTL_EXPR(UMAX, "umax", "ee", 'c')
containing MEM. These operations exist in only two cases:
1. pushes onto the stack.
2. created automatically by the life_analysis pass in flow.c. */
-DEF_RTL_EXPR(PRE_DEC, "pre_dec", "e", 'x')
-DEF_RTL_EXPR(PRE_INC, "pre_inc", "e", 'x')
-DEF_RTL_EXPR(POST_DEC, "post_dec", "e", 'x')
-DEF_RTL_EXPR(POST_INC, "post_inc", "e", 'x')
+DEF_RTL_EXPR(PRE_DEC, "pre_dec", "e", 'a')
+DEF_RTL_EXPR(PRE_INC, "pre_inc", "e", 'a')
+DEF_RTL_EXPR(POST_DEC, "post_dec", "e", 'a')
+DEF_RTL_EXPR(POST_INC, "post_inc", "e", 'a')
/* These binary operations are used to represent generic address
side-effects in memory addresses, except for simple incrementation
or decrementation which use the above operations. They are
- created automatically by the life_analysis pass in flow.c.
- (Note that these operators are currently placeholders.) */
-DEF_RTL_EXPR(PRE_MODIFY, "pre_modify", "ee", 'x')
-DEF_RTL_EXPR(POST_MODIFY, "post_modify", "ee", 'x')
+ created automatically by the life_analysis pass in flow.c.
+ The first operand is a REG which is used as the address.
+ The second operand is an expression that is assigned to the
+ register, either before (PRE_MODIFY) or after (POST_MODIFY)
+ evaluating the address.
+ Currently, the compiler can only handle second operands of the
+ form (plus (reg) (reg)) and (plus (reg) (const_int)), where
+ the first operand of the PLUS has to be the same register as
+ the first operand of the *_MODIFY. */
+DEF_RTL_EXPR(PRE_MODIFY, "pre_modify", "ee", 'a')
+DEF_RTL_EXPR(POST_MODIFY, "post_modify", "ee", 'a')
/* Comparison operations. The ordered comparisons exist in two
flavors, signed and unsigned. */
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 4351712..1a20ae4 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -892,6 +892,22 @@ extern const char * const note_insn_name[NOTE_INSN_MAX - NOTE_INSN_BIAS];
#define HAVE_POST_DECREMENT 0
#endif
+#ifndef HAVE_POST_MODIFY_DISP
+#define HAVE_POST_MODIFY_DISP 0
+#endif
+
+#ifndef HAVE_POST_MODIFY_REG
+#define HAVE_POST_MODIFY_REG 0
+#endif
+
+#ifndef HAVE_PRE_MODIFY_DISP
+#define HAVE_PRE_MODIFY_DISP 0
+#endif
+
+#ifndef HAVE_PRE_MODIFY_REG
+#define HAVE_PRE_MODIFY_REG 0
+#endif
+
/* Some architectures do not have complete pre/post increment/decrement
instruction sets, or only move some modes efficiently. These macros
@@ -1260,6 +1276,7 @@ extern int rtx_addr_varies_p PARAMS ((rtx));
extern HOST_WIDE_INT get_integer_term PARAMS ((rtx));
extern rtx get_related_value PARAMS ((rtx));
extern int reg_mentioned_p PARAMS ((rtx, rtx));
+extern int count_occurrences PARAMS ((rtx, rtx, int));
extern int reg_referenced_p PARAMS ((rtx, rtx));
extern int reg_used_between_p PARAMS ((rtx, rtx, rtx));
extern int reg_referenced_between_p PARAMS ((rtx, rtx, rtx));
diff --git a/gcc/rtl.texi b/gcc/rtl.texi
index 0bd9f0c..247807a 100644
--- a/gcc/rtl.texi
+++ b/gcc/rtl.texi
@@ -165,6 +165,10 @@ An RTX code for an entire instruction: @code{INSN}, @code{JUMP_INSN}, and
An RTX code for something that matches in insns, such as
@code{MATCH_DUP}. These only occur in machine descriptions.
+@item a
+An RTX code for an auto-increment addressing mode, such as
+@code{POST_INC}.
+
@item x
All other RTX codes. This category includes the remaining codes used
only in machine descriptions (@code{DEFINE_*}, etc.). It also includes
diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c
index e7cc12f..105c39a 100644
--- a/gcc/rtlanal.c
+++ b/gcc/rtlanal.c
@@ -246,6 +246,69 @@ get_related_value (x)
return 0;
}
+/* Return the number of places FIND appears within X. If COUNT_DEST is
+ zero, we do not count occurrences inside the destination of a SET. */
+
+int
+count_occurrences (x, find, count_dest)
+ rtx x, find;
+ int count_dest;
+{
+ int i, j;
+ enum rtx_code code;
+ const char *format_ptr;
+ int count;
+
+ if (x == find)
+ return 1;
+
+ code = GET_CODE (x);
+
+ switch (code)
+ {
+ case REG:
+ case CONST_INT:
+ case CONST_DOUBLE:
+ case SYMBOL_REF:
+ case CODE_LABEL:
+ case PC:
+ case CC0:
+ return 0;
+
+ case MEM:
+ if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
+ return 1;
+ break;
+
+ case SET:
+ if (SET_DEST (x) == find && ! count_dest)
+ return count_occurrences (SET_SRC (x), find, count_dest);
+ break;
+
+ default:
+ break;
+ }
+
+ format_ptr = GET_RTX_FORMAT (code);
+ count = 0;
+
+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
+ {
+ switch (*format_ptr++)
+ {
+ case 'e':
+ count += count_occurrences (XEXP (x, i), find, count_dest);
+ break;
+
+ case 'E':
+ for (j = 0; j < XVECLEN (x, i); j++)
+ count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
+ break;
+ }
+ }
+ return count;
+}
+
/* Nonzero if register REG appears somewhere within IN.
Also works if REG is not a register; in this case it checks
for a subexpression of IN that is Lisp "equal" to REG. */
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index 67f7bab..2e79a8a 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -2535,6 +2535,8 @@ hash_rtx (x, mode, create)
case PRE_INC:
case POST_DEC:
case POST_INC:
+ case POST_MODIFY:
+ case PRE_MODIFY:
case PC:
case CC0:
case CALL: