about summary refs log tree commit diff
path: root/gcc
diff options
context:
space:
mode:
author	Georg-Johann Lay <avr@gjlay.de>	2023-06-11 13:54:14 +0200
committer	Georg-Johann Lay <avr@gjlay.de>	2023-06-11 13:54:14 +0200
commit3443d4ba043a5d2545107d501c6ea7c1112f04dd (patch)
treeb8f1e4894ac35529eb0506f069e7c4726aef65d9 /gcc
parent20643513b8dd34c07f2b0fccf119153a30735f66 (diff)
downloadgcc-3443d4ba043a5d2545107d501c6ea7c1112f04dd.zip
gcc-3443d4ba043a5d2545107d501c6ea7c1112f04dd.tar.gz
gcc-3443d4ba043a5d2545107d501c6ea7c1112f04dd.tar.bz2
Use canonical form for reversed single-bit insertions after reload.
We now split almost all insns after reload in order to add a clobber of
REG_CC.  If insns are coming from the insn combiner and there is no
canonical form for the respective arithmetic (like for reversed bit
insertions), there is no need to keep all these different representations
after reload:  Instead of splitting such patterns to their
clobber-REG_CC analogon, we can split to a canonical representation,
which is insv_notbit for the present case.  This is a no-op change.

gcc/
	* config/avr/avr.md (adjust_len) [insv_notbit_0, insv_notbit_7]:
	Remove attribute values.
	(insv_notbit): New post-reload insn.
	(*insv.not-shiftrt_split, *insv.xor1-bit.0_split)
	(*insv.not-bit.0_split, *insv.not-bit.7_split)
	(*insv.xor-extract_split): Split to insv_notbit.
	(*insv.not-shiftrt, *insv.xor1-bit.0, *insv.not-bit.0, *insv.not-bit.7)
	(*insv.xor-extract): Remove post-reload insns.
	* config/avr/avr.cc (avr_out_insert_notbit) [bitno]: Remove parameter.
	(avr_adjust_insn_length): Adjust call of avr_out_insert_notbit.
	[ADJUST_LEN_INSV_NOTBIT_0, ADJUST_LEN_INSV_NOTBIT_7]: Remove cases.
	* config/avr/avr-protos.h (avr_out_insert_notbit): Adjust prototype.
Diffstat (limited to 'gcc')
-rw-r--r--gcc/config/avr/avr-protos.h2
-rw-r--r--gcc/config/avr/avr.cc19
-rw-r--r--gcc/config/avr/avr.md131
3 files changed, 41 insertions, 111 deletions
diff --git a/gcc/config/avr/avr-protos.h b/gcc/config/avr/avr-protos.h
index a10d91d..5c1343f 100644
--- a/gcc/config/avr/avr-protos.h
+++ b/gcc/config/avr/avr-protos.h
@@ -57,7 +57,7 @@ extern const char *avr_out_compare64 (rtx_insn *, rtx*, int*);
extern const char *ret_cond_branch (rtx x, int len, int reverse);
extern const char *avr_out_movpsi (rtx_insn *, rtx*, int*);
extern const char *avr_out_sign_extend (rtx_insn *, rtx*, int*);
-extern const char *avr_out_insert_notbit (rtx_insn *, rtx*, rtx, int*);
+extern const char *avr_out_insert_notbit (rtx_insn *, rtx*, int*);
extern const char *avr_out_extr (rtx_insn *, rtx*, int*);
extern const char *avr_out_extr_not (rtx_insn *, rtx*, int*);
extern const char *avr_out_plus_set_ZN (rtx*, int*);
diff --git a/gcc/config/avr/avr.cc b/gcc/config/avr/avr.cc
index b02fddd..ef6872a 100644
--- a/gcc/config/avr/avr.cc
+++ b/gcc/config/avr/avr.cc
@@ -8995,20 +8995,15 @@ avr_out_addto_sp (rtx *op, int *plen)
}
-/* Output instructions to insert an inverted bit into OPERANDS[0]:
- $0.$1 = ~$2.$3 if XBITNO = NULL
- $0.$1 = ~$2.XBITNO if XBITNO != NULL.
+/* Output instructions to insert an inverted bit into OP[0]: $0.$1 = ~$2.$3.
If PLEN = NULL then output the respective instruction sequence which
is a combination of BST / BLD and some instruction(s) to invert the bit.
If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
Return "". */
const char*
-avr_out_insert_notbit (rtx_insn *insn, rtx operands[], rtx xbitno, int *plen)
+avr_out_insert_notbit (rtx_insn *insn, rtx op[], int *plen)
{
- rtx op[4] = { operands[0], operands[1], operands[2],
- xbitno == NULL_RTX ? operands [3] : xbitno };
-
if (INTVAL (op[1]) == 7
&& test_hard_reg_class (LD_REGS, op[0]))
{
@@ -10038,15 +10033,7 @@ avr_adjust_insn_length (rtx_insn *insn, int len)
case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
case ADJUST_LEN_ADD_SET_ZN: avr_out_plus_set_ZN (op, &len); break;
- case ADJUST_LEN_INSV_NOTBIT:
- avr_out_insert_notbit (insn, op, NULL_RTX, &len);
- break;
- case ADJUST_LEN_INSV_NOTBIT_0:
- avr_out_insert_notbit (insn, op, const0_rtx, &len);
- break;
- case ADJUST_LEN_INSV_NOTBIT_7:
- avr_out_insert_notbit (insn, op, GEN_INT (7), &len);
- break;
+ case ADJUST_LEN_INSV_NOTBIT: avr_out_insert_notbit (insn, op, &len); break;
default:
gcc_unreachable();
diff --git a/gcc/config/avr/avr.md b/gcc/config/avr/avr.md
index eadc482..83dd150 100644
--- a/gcc/config/avr/avr.md
+++ b/gcc/config/avr/avr.md
@@ -163,7 +163,7 @@
ashlhi, ashrhi, lshrhi,
ashlsi, ashrsi, lshrsi,
ashlpsi, ashrpsi, lshrpsi,
- insert_bits, insv_notbit, insv_notbit_0, insv_notbit_7,
+ insert_bits, insv_notbit,
add_set_ZN, cmp_uext, cmp_sext,
no"
(const_string "no"))
@@ -9152,6 +9152,21 @@
;; Same, but with a NOT inverting the source bit.
;; Insert bit ~$2.$3 into $0.$1
+(define_insn "insv_notbit"
+ [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
+ (const_int 1)
+ (match_operand:QI 1 "const_0_to_7_operand" "n"))
+ (not:QI (zero_extract:QI (match_operand:QI 2 "register_operand" "r")
+ (const_int 1)
+ (match_operand:QI 3 "const_0_to_7_operand" "n"))))
+ (clobber (reg:CC REG_CC))]
+ "reload_completed"
+ {
+ return avr_out_insert_notbit (insn, operands, NULL);
+ }
+ [(set_attr "adjust_len" "insv_notbit")])
+
+;; Insert bit ~$2.$3 into $0.$1
(define_insn_and_split "*insv.not-shiftrt_split"
[(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
(const_int 1)
@@ -9161,25 +9176,11 @@
""
"#"
"&& reload_completed"
- [(parallel [(set (zero_extract:QI (match_dup 0)
- (const_int 1)
- (match_dup 1))
- (not:QI (any_shiftrt:QI (match_dup 2)
- (match_dup 3))))
- (clobber (reg:CC REG_CC))])])
-
-(define_insn "*insv.not-shiftrt"
- [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
- (const_int 1)
- (match_operand:QI 1 "const_0_to_7_operand" "n"))
- (not:QI (any_shiftrt:QI (match_operand:QI 2 "register_operand" "r")
- (match_operand:QI 3 "const_0_to_7_operand" "n"))))
- (clobber (reg:CC REG_CC))]
- "reload_completed"
+ [(scratch)]
{
- return avr_out_insert_notbit (insn, operands, NULL_RTX, NULL);
- }
- [(set_attr "adjust_len" "insv_notbit")])
+ emit (gen_insv_notbit (operands[0], operands[1], operands[2], operands[3]));
+ DONE;
+ })
;; Insert bit ~$2.0 into $0.$1
(define_insn_and_split "*insv.xor1-bit.0_split"
@@ -9191,25 +9192,11 @@
""
"#"
"&& reload_completed"
- [(parallel [(set (zero_extract:QI (match_dup 0)
- (const_int 1)
- (match_dup 1))
- (xor:QI (match_dup 2)
- (const_int 1)))
- (clobber (reg:CC REG_CC))])])
-
-(define_insn "*insv.xor1-bit.0"
- [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
- (const_int 1)
- (match_operand:QI 1 "const_0_to_7_operand" "n"))
- (xor:QI (match_operand:QI 2 "register_operand" "r")
- (const_int 1)))
- (clobber (reg:CC REG_CC))]
- "reload_completed"
+ [(scratch)]
{
- return avr_out_insert_notbit (insn, operands, const0_rtx, NULL);
- }
- [(set_attr "adjust_len" "insv_notbit_0")])
+ emit (gen_insv_notbit (operands[0], operands[1], operands[2], const0_rtx));
+ DONE;
+ })
;; Insert bit ~$2.0 into $0.$1
(define_insn_and_split "*insv.not-bit.0_split"
@@ -9220,23 +9207,11 @@
""
"#"
"&& reload_completed"
- [(parallel [(set (zero_extract:QI (match_dup 0)
- (const_int 1)
- (match_dup 1))
- (not:QI (match_dup 2)))
- (clobber (reg:CC REG_CC))])])
-
-(define_insn "*insv.not-bit.0"
- [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
- (const_int 1)
- (match_operand:QI 1 "const_0_to_7_operand" "n"))
- (not:QI (match_operand:QI 2 "register_operand" "r")))
- (clobber (reg:CC REG_CC))]
- "reload_completed"
+ [(scratch)]
{
- return avr_out_insert_notbit (insn, operands, const0_rtx, NULL);
- }
- [(set_attr "adjust_len" "insv_notbit_0")])
+ emit (gen_insv_notbit (operands[0], operands[1], operands[2], const0_rtx));
+ DONE;
+ })
;; Insert bit ~$2.7 into $0.$1
(define_insn_and_split "*insv.not-bit.7_split"
@@ -9248,25 +9223,11 @@
""
"#"
"&& reload_completed"
- [(parallel [(set (zero_extract:QI (match_dup 0)
- (const_int 1)
- (match_dup 1))
- (ge:QI (match_dup 2)
- (const_int 0)))
- (clobber (reg:CC REG_CC))])])
-
-(define_insn "*insv.not-bit.7"
- [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
- (const_int 1)
- (match_operand:QI 1 "const_0_to_7_operand" "n"))
- (ge:QI (match_operand:QI 2 "register_operand" "r")
- (const_int 0)))
- (clobber (reg:CC REG_CC))]
- "reload_completed"
+ [(scratch)]
{
- return avr_out_insert_notbit (insn, operands, GEN_INT (7), NULL);
- }
- [(set_attr "adjust_len" "insv_notbit_7")])
+ emit (gen_insv_notbit (operands[0], operands[1], operands[2], GEN_INT(7)));
+ DONE;
+ })
;; Insert bit ~$2.$3 into $0.$1
(define_insn_and_split "*insv.xor-extract_split"
@@ -9280,31 +9241,13 @@
"INTVAL (operands[4]) & (1 << INTVAL (operands[3]))"
"#"
"&& reload_completed"
- [(parallel [(set (zero_extract:QI (match_dup 0)
- (const_int 1)
- (match_dup 1))
- (any_extract:QI (xor:QI (match_dup 2)
- (match_dup 4))
- (const_int 1)
- (match_dup 3)))
- (clobber (reg:CC REG_CC))])])
-
-(define_insn "*insv.xor-extract"
- [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
- (const_int 1)
- (match_operand:QI 1 "const_0_to_7_operand" "n"))
- (any_extract:QI (xor:QI (match_operand:QI 2 "register_operand" "r")
- (match_operand:QI 4 "const_int_operand" "n"))
- (const_int 1)
- (match_operand:QI 3 "const_0_to_7_operand" "n")))
- (clobber (reg:CC REG_CC))]
- "INTVAL (operands[4]) & (1 << INTVAL (operands[3])) && reload_completed"
+ [(scratch)]
{
- return avr_out_insert_notbit (insn, operands, NULL_RTX, NULL);
- }
- [(set_attr "adjust_len" "insv_notbit")])
+ emit (gen_insv_notbit (operands[0], operands[1], operands[2], operands[3]));
+ DONE;
+ })
+
-
;; Some combine patterns that try to fix bad code when a value is composed
;; from byte parts like in PR27663.
;; The patterns give some release but the code still is not optimal,