aboutsummaryrefslogtreecommitdiff
path: root/gcc/config/avr/avr.c
diff options
context:
space:
mode:
authorSenthil Kumar Selvaraj <saaadhu@gcc.gnu.org>2021-04-28 17:29:12 +0000
committerSenthil Kumar Selvaraj <saaadhu@gcc.gnu.org>2021-04-28 17:42:01 +0000
commit3ba781d3b5c8efadb60866c9743b657e8f0eb222 (patch)
treecae42c5f7f165eedebca004b3b75dacabcdb847e /gcc/config/avr/avr.c
parentc8767ee9f9355a63bfeb8318df32bc39c5b0f3ad (diff)
downloadgcc-3ba781d3b5c8efadb60866c9743b657e8f0eb222.zip
gcc-3ba781d3b5c8efadb60866c9743b657e8f0eb222.tar.gz
gcc-3ba781d3b5c8efadb60866c9743b657e8f0eb222.tar.bz2
AVR cc0 conversion
See https://gcc.gnu.org/pipermail/gcc-patches/2021-January/563638.html for background. This patch converts the avr backend to MODE_CC. It addresses some of the comments made in the previous submission over here (https://gcc.gnu.org/pipermail/gcc-patches/2020-December/561757.html). Specifically, this patch has 1. Automatic clobber of REG_CC in inline asm statements, via TARGET_MD_ASM_ADJUST hook. 2. Direct clobber of REG_CC in insns emitted after reload (pro and epilogue). 3. Regression testing done on atmega8, atmega128, attiny40 and atxmega128a3 devices (more details below). 4. Verification and fixes for casesi and avr_compare_pattern related code that inspects insns, by looking at avr-casesi and mach RTL dumps. 5. Use length of parallel instead of passing in operand counts when generating code for shift patterns. 6. Fixes for indentation glitches. 7. Removal of CC_xxx stuff in avr-protos.h. In the places where the macros were still used (cond_string), I've replaced them with a bool hardcoded to false. I expect this will go away/get fixed when I eventually add specific CC modes. Things still to do: 1. Adjustment of peepholes/define_splits to match against patterns with REG_CC clobber. 2. Model effect of non-compare insns on REG_CC using additional CC modes. I'm hoping to use a modified version of the cc attribute and define_subst (again inspired by the cris port), to do this. 3. RTX cost adjustment. gcc/ * config/avr/avr-dimode.md: Turn existing patterns into define_insn_and_split style patterns where the splitter adds a clobber of the condition code register. Drop "cc" attribute. Add new patterns to match output of the splitters. * config/avr/avr-fixed.md: Likewise. * config/avr/avr.c (cc_reg_rtx): New. (avr_parallel_insn_from_insns): Adjust insn count for removal of set of cc0. (avr_is_casesi_sequence): Likewise. (avr_casei_sequence_check_operands): Likewise. (avr_optimize_casesi): Likewise. Also insert new insns after jump_insn. 
(avr_pass_casesi::avr_rest_of_handle_casesi): Adjust for removal of set of cc0. (avr_init_expanders): Initialize cc_reg_rtx. (avr_regno_reg_class): Handle REG_CC. (cond_string): Remove usage of CC_OVERFLOW_UNUSABLE. (avr_notice_update_cc): Remove function. (ret_cond_branch): Remove usage of CC_OVERFLOW_UNUSABLE. (compare_condition): Adjust for PARALLEL with REG_CC clobber. (out_shift_with_cnt): Likewise. (ashlhi3_out): Likewise. (ashrhi3_out): Likewise. (lshrhi3_out): Likewise. (avr_class_max_nregs): Return single reg for REG_CC. (avr_compare_pattern): Check for REG_CC instead of cc0_rtx. (avr_reorg_remove_redundant_compare): Likewise. (avr_reorg): Adjust for PARALLEL with REG_CC clobber. (avr_hard_regno_nregs): Return single reg for REG_CC. (avr_hard_regno_mode_ok): Allow only CCmode for REG_CC. (avr_md_asm_adjust): Clobber REG_CC. (TARGET_HARD_REGNO_NREGS): Define. (TARGET_CLASS_MAX_NREGS): Define. (TARGET_MD_ASM_ADJUST): Define. * config/avr/avr.h (FIRST_PSEUDO_REGISTER): Adjust for REG_CC. (enum reg_class): Add CC_REG class. (NOTICE_UPDATE_CC): Remove. (CC_OVERFLOW_UNUSABLE): Remove. (CC_NO_CARRY): Remove. * config/avr/avr.md: Turn existing patterns into define_insn_and_split style patterns where the splitter adds a clobber of the condition code register. Drop "cc" attribute. Add new patterns to match output of the splitters. (sez): Remove unused pattern.
Diffstat (limited to 'gcc/config/avr/avr.c')
-rw-r--r--gcc/config/avr/avr.c318
1 files changed, 130 insertions, 188 deletions
diff --git a/gcc/config/avr/avr.c b/gcc/config/avr/avr.c
index 3a250df..06c84d5 100644
--- a/gcc/config/avr/avr.c
+++ b/gcc/config/avr/avr.c
@@ -195,6 +195,10 @@ rtx tmp_reg_rtx;
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;
+/* Condition Code register RTX (reg:CC REG_CC) */
+extern GTY(()) rtx cc_reg_rtx;
+rtx cc_reg_rtx;
+
/* RTXs for all general purpose registers as QImode */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];
@@ -376,10 +380,10 @@ make_avr_pass_casesi (gcc::context *ctxt)
/* Make one parallel insn with all the patterns from insns i[0]..i[5]. */
static rtx_insn*
-avr_parallel_insn_from_insns (rtx_insn *i[6])
+avr_parallel_insn_from_insns (rtx_insn *i[5])
{
- rtvec vec = gen_rtvec (6, PATTERN (i[0]), PATTERN (i[1]), PATTERN (i[2]),
- PATTERN (i[3]), PATTERN (i[4]), PATTERN (i[5]));
+ rtvec vec = gen_rtvec (5, PATTERN (i[0]), PATTERN (i[1]), PATTERN (i[2]),
+ PATTERN (i[3]), PATTERN (i[4]));
start_sequence();
emit (gen_rtx_PARALLEL (VOIDmode, vec));
rtx_insn *insn = get_insns();
@@ -397,22 +401,21 @@ avr_parallel_insn_from_insns (rtx_insn *i[6])
pattern casesi_<mode>_sequence forged from the sequence to recog_data. */
static bool
-avr_is_casesi_sequence (basic_block bb, rtx_insn *insn, rtx_insn *insns[6])
+avr_is_casesi_sequence (basic_block bb, rtx_insn *insn, rtx_insn *insns[5])
{
- rtx set_5, set_0;
+ rtx set_4, set_0;
/* A first and quick test for a casesi sequences. As a side effect of
- the test, harvest respective insns to INSNS[0..5]. */
+ the test, harvest respective insns to INSNS[0..4]. */
- if (!(JUMP_P (insns[5] = insn)
+ if (!(JUMP_P (insns[4] = insn)
// casesi is the only insn that comes up with UNSPEC_INDEX_JMP,
// hence the following test ensures that we are actually dealing
// with code from casesi.
- && (set_5 = single_set (insns[5]))
- && UNSPEC == GET_CODE (SET_SRC (set_5))
- && UNSPEC_INDEX_JMP == XINT (SET_SRC (set_5), 1)
+ && (set_4 = single_set (insns[4]))
+ && UNSPEC == GET_CODE (SET_SRC (set_4))
+ && UNSPEC_INDEX_JMP == XINT (SET_SRC (set_4), 1)
- && (insns[4] = prev_real_insn (insns[5]))
&& (insns[3] = prev_real_insn (insns[4]))
&& (insns[2] = prev_real_insn (insns[3]))
&& (insns[1] = prev_real_insn (insns[2]))
@@ -429,7 +432,7 @@ avr_is_casesi_sequence (basic_block bb, rtx_insn *insn, rtx_insn *insns[6])
{
fprintf (dump_file, ";; Sequence from casesi in "
"[bb %d]:\n\n", bb->index);
- for (int i = 0; i < 6; i++)
+ for (int i = 0; i < 5; i++)
print_rtl_single (dump_file, insns[i]);
}
@@ -519,7 +522,7 @@ avr_casei_sequence_check_operands (rtx *xop)
}
-/* INSNS[1..5] is a sequence as generated by casesi and INSNS[0] is an
+/* INSNS[1..4] is a sequence as generated by casesi and INSNS[0] is an
extension of an 8-bit or 16-bit integer to SImode. XOP contains the
operands of INSNS as extracted by insn_extract from pattern
casesi_<mode>_sequence:
@@ -541,7 +544,7 @@ avr_casei_sequence_check_operands (rtx *xop)
switch value instead of SImode. */
static void
-avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
+avr_optimize_casesi (rtx_insn *insns[5], rtx *xop)
{
// Original mode of the switch value; this is QImode or HImode.
machine_mode mode = GET_MODE (xop[10]);
@@ -597,16 +600,21 @@ avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
rtx reg = copy_to_mode_reg (mode, xop[10]);
rtx (*gen_add)(rtx,rtx,rtx) = QImode == mode ? gen_addqi3 : gen_addhi3;
- rtx (*gen_cmp)(rtx,rtx) = QImode == mode ? gen_cmpqi3 : gen_cmphi3;
+ rtx (*gen_cbranch)(rtx,rtx,rtx,rtx)
+ = QImode == mode ? gen_cbranchqi4 : gen_cbranchhi4;
emit_insn (gen_add (reg, reg, gen_int_mode (-low_idx, mode)));
- emit_insn (gen_cmp (reg, gen_int_mode (num_idx, mode)));
+ rtx op0 = reg; rtx op1 = gen_int_mode (num_idx, mode);
+ rtx labelref = copy_rtx (xop[4]);
+ emit_jump_insn (gen_cbranch (gen_rtx_fmt_ee (GTU, VOIDmode, op0, op1),
+ op0, op1,
+ labelref));
seq1 = get_insns();
last1 = get_last_insn();
end_sequence();
- emit_insn_before (seq1, insns[1]);
+ emit_insn_after (seq1, insns[2]);
// After the out-of-bounds test and corresponding branch, use a
// 16-bit index. If QImode is used, extend it to HImode first.
@@ -627,7 +635,7 @@ avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
last2 = get_last_insn();
end_sequence();
- emit_insn_after (seq2, insns[4]);
+ emit_insn_after (seq2, insns[3]);
if (dump_file)
{
@@ -648,7 +656,7 @@ avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
}
fprintf (dump_file, ";; Deleting insns: %d, %d, %d.\n\n",
- INSN_UID (insns[1]), INSN_UID (insns[2]), INSN_UID (insns[4]));
+ INSN_UID (insns[1]), INSN_UID (insns[2]), INSN_UID (insns[3]));
}
// Pseudodelete the SImode and subreg of SImode insns. We don't care
@@ -657,7 +665,7 @@ avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
SET_INSN_DELETED (insns[1]);
SET_INSN_DELETED (insns[2]);
- SET_INSN_DELETED (insns[4]);
+ SET_INSN_DELETED (insns[3]);
}
@@ -668,7 +676,7 @@ avr_pass_casesi::avr_rest_of_handle_casesi (function *func)
FOR_EACH_BB_FN (bb, func)
{
- rtx_insn *insn, *insns[6];
+ rtx_insn *insn, *insns[5];
FOR_BB_INSNS (bb, insn)
{
@@ -814,6 +822,8 @@ avr_init_expanders (void)
tmp_reg_rtx = all_regs_rtx[AVR_TMP_REGNO];
zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];
+ cc_reg_rtx = gen_rtx_REG (CCmode, REG_CC);
+
lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
@@ -864,6 +874,9 @@ avr_regno_reg_class (int r)
if (r <= 33)
return reg_class_tab[r];
+ if (r == REG_CC)
+ return CC_REG;
+
return ALL_REGS;
}
@@ -2641,6 +2654,8 @@ ptrreg_to_str (int regno)
static const char*
cond_string (enum rtx_code code)
{
+ bool cc_overflow_unusable = false;
+
switch (code)
{
case NE:
@@ -2648,12 +2663,12 @@ cond_string (enum rtx_code code)
case EQ:
return "eq";
case GE:
- if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
+ if (cc_overflow_unusable)
return "pl";
else
return "ge";
case LT:
- if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
+ if (cc_overflow_unusable)
return "mi";
else
return "lt";
@@ -2989,152 +3004,6 @@ avr_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
return size <= MOVE_MAX_PIECES;
}
-
-/* Worker function for `NOTICE_UPDATE_CC'. */
-/* Update the condition code in the INSN. */
-
-void
-avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
-{
- rtx set;
- enum attr_cc cc = get_attr_cc (insn);
-
- switch (cc)
- {
- default:
- break;
-
- case CC_PLUS:
- case CC_LDI:
- {
- rtx *op = recog_data.operand;
- int len_dummy, icc;
-
- /* Extract insn's operands. */
- extract_constrain_insn_cached (insn);
-
- switch (cc)
- {
- default:
- gcc_unreachable();
-
- case CC_PLUS:
- avr_out_plus (insn, op, &len_dummy, &icc);
- cc = (enum attr_cc) icc;
- break;
-
- case CC_LDI:
-
- cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
- && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
- /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
- ? CC_CLOBBER
- /* Any other "r,rL" combination does not alter cc0. */
- : CC_NONE;
-
- break;
- } /* inner switch */
-
- break;
- }
- } /* outer swicth */
-
- switch (cc)
- {
- default:
- /* Special values like CC_OUT_PLUS from above have been
- mapped to "standard" CC_* values so we never come here. */
-
- gcc_unreachable();
- break;
-
- case CC_NONE:
- /* Insn does not affect CC at all, but it might set some registers
- that are stored in cc_status. If such a register is affected by
- the current insn, for example by means of a SET or a CLOBBER,
- then we must reset cc_status; cf. PR77326.
-
- Unfortunately, set_of cannot be used as reg_overlap_mentioned_p
- will abort on COMPARE (which might be found in cc_status.value1/2).
- Thus work out the registers set by the insn and regs mentioned
- in cc_status.value1/2. */
-
- if (cc_status.value1
- || cc_status.value2)
- {
- HARD_REG_SET regs_used;
- HARD_REG_SET regs_set;
- CLEAR_HARD_REG_SET (regs_used);
-
- if (cc_status.value1
- && !CONSTANT_P (cc_status.value1))
- {
- find_all_hard_regs (cc_status.value1, &regs_used);
- }
-
- if (cc_status.value2
- && !CONSTANT_P (cc_status.value2))
- {
- find_all_hard_regs (cc_status.value2, &regs_used);
- }
-
- find_all_hard_reg_sets (insn, &regs_set, false);
-
- if (hard_reg_set_intersect_p (regs_used, regs_set))
- {
- CC_STATUS_INIT;
- }
- }
-
- break; // CC_NONE
-
- case CC_SET_N:
- CC_STATUS_INIT;
- break;
-
- case CC_SET_ZN:
- set = single_set (insn);
- CC_STATUS_INIT;
- if (set)
- {
- cc_status.flags |= CC_NO_OVERFLOW;
- cc_status.value1 = SET_DEST (set);
- }
- break;
-
- case CC_SET_VZN:
- /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
- of this combination, cf. also PR61055. */
- CC_STATUS_INIT;
- break;
-
- case CC_SET_CZN:
- /* Insn sets the Z,N,C flags of CC to recog_operand[0].
- The V flag may or may not be known but that's ok because
- alter_cond will change tests to use EQ/NE. */
- set = single_set (insn);
- CC_STATUS_INIT;
- if (set)
- {
- cc_status.value1 = SET_DEST (set);
- cc_status.flags |= CC_OVERFLOW_UNUSABLE;
- }
- break;
-
- case CC_COMPARE:
- set = single_set (insn);
- CC_STATUS_INIT;
- if (set)
- cc_status.value1 = SET_SRC (set);
- break;
-
- case CC_CLOBBER:
- /* Insn doesn't leave CC in a usable state. */
- CC_STATUS_INIT;
- break;
- }
-}
-
/* Choose mode for jump insn:
1 - relative jump in range -63 <= x <= 62 ;
2 - relative jump in range -2046 <= x <= 2045 ;
@@ -3167,11 +3036,12 @@ const char*
ret_cond_branch (rtx x, int len, int reverse)
{
RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
+ bool cc_overflow_unusable = false;
switch (cond)
{
case GT:
- if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
+ if (cc_overflow_unusable)
return (len == 1 ? ("breq .+2" CR_TAB
"brpl %0") :
len == 2 ? ("breq .+4" CR_TAB
@@ -3200,7 +3070,7 @@ ret_cond_branch (rtx x, int len, int reverse)
"brlo .+4" CR_TAB
"jmp %0"));
case LE:
- if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
+ if (cc_overflow_unusable)
return (len == 1 ? ("breq %0" CR_TAB
"brmi %0") :
len == 2 ? ("breq .+2" CR_TAB
@@ -5820,6 +5690,8 @@ compare_condition (rtx_insn *insn)
if (next && JUMP_P (next))
{
rtx pat = PATTERN (next);
+ if (GET_CODE (pat) == PARALLEL)
+ pat = XVECEXP (pat, 0, 0);
rtx src = SET_SRC (pat);
if (IF_THEN_ELSE == GET_CODE (src))
@@ -6179,7 +6051,13 @@ out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
if (CONST_INT_P (operands[2]))
{
+ /* Operand 3 is a scratch register if this is a
+ parallel with three elements i.e. a set,
+ a clobber of a scratch, and clobber of REG_CC.
+ If a scratch reg is not available, then the parallel
+ will contain only a set and clobber of REG_CC. */
bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
+ && XVECLEN (PATTERN (insn), 0) == 3
&& REG_P (operands[3]));
int count = INTVAL (operands[2]);
int max_len = 10; /* If larger than this, always use a loop. */
@@ -6376,7 +6254,9 @@ ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
if (CONST_INT_P (operands[2]))
{
- int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+ int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
+ && XVECLEN (PATTERN (insn), 0) == 3
+ && REG_P (operands[3]));
int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
int k;
int *t = len;
@@ -6857,7 +6737,9 @@ ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
if (CONST_INT_P (operands[2]))
{
- int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+ int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
+ && XVECLEN (PATTERN (insn), 0) == 3
+ && REG_P (operands[3]));
int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
int k;
int *t = len;
@@ -7271,7 +7153,9 @@ lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
if (CONST_INT_P (operands[2]))
{
- int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+ int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
+ && XVECLEN (PATTERN (insn), 0) == 3
+ && REG_P (operands[3]));
int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
int k;
int *t = len;
@@ -9619,6 +9503,18 @@ avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
return default_assemble_integer (x, size, aligned_p);
}
+/* Implement TARGET_CLASS_MAX_NREGS. Reasons described in comments for
+ avr_hard_regno_nregs. */
+
+static unsigned char
+avr_class_max_nregs (reg_class_t rclass, machine_mode mode)
+{
+ if (rclass == CC_REG && mode == CCmode)
+ return 1;
+
+ return CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD);
+}
+
/* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
/* Return value is nonzero if pseudos that have been
@@ -11719,7 +11615,8 @@ avr_compare_pattern (rtx_insn *insn)
if (pattern
&& NONJUMP_INSN_P (insn)
- && SET_DEST (pattern) == cc0_rtx
+ && REG_P (SET_DEST (pattern))
+ && REGNO (SET_DEST (pattern)) == REG_CC
&& GET_CODE (SET_SRC (pattern)) == COMPARE)
{
machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
@@ -11740,18 +11637,18 @@ avr_compare_pattern (rtx_insn *insn)
/* Expansion of switch/case decision trees leads to code like
- cc0 = compare (Reg, Num)
- if (cc0 == 0)
+ REG_CC = compare (Reg, Num)
+ if (REG_CC == 0)
goto L1
- cc0 = compare (Reg, Num)
- if (cc0 > 0)
+ REG_CC = compare (Reg, Num)
+ if (REG_CC > 0)
goto L2
The second comparison is superfluous and can be deleted.
The second jump condition can be transformed from a
- "difficult" one to a "simple" one because "cc0 > 0" and
- "cc0 >= 0" will have the same effect here.
+ "difficult" one to a "simple" one because "REG_CC > 0" and
+ "REG_CC >= 0" will have the same effect here.
This function relies on the way switch/case is being expaned
as binary decision tree. For example code see PR 49903.
@@ -11822,8 +11719,8 @@ avr_reorg_remove_redundant_compare (rtx_insn *insn1)
|| LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
|| LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
|| !COMPARISON_P (XEXP (ifelse2, 0))
- || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
- || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
+ || REG_CC != REGNO (XEXP (XEXP (ifelse1, 0), 0))
+ || REG_CC != REGNO (XEXP (XEXP (ifelse2, 0), 0))
|| const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
|| const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
{
@@ -11832,20 +11729,20 @@ avr_reorg_remove_redundant_compare (rtx_insn *insn1)
/* We filtered the insn sequence to look like
- (set (cc0)
+ (set (reg:CC cc)
(compare (reg:M N)
(const_int VAL)))
(set (pc)
- (if_then_else (eq (cc0)
+ (if_then_else (eq (reg:CC cc)
(const_int 0))
(label_ref L1)
(pc)))
- (set (cc0)
+ (set (reg:CC cc)
(compare (reg:M N)
(const_int VAL)))
(set (pc)
- (if_then_else (CODE (cc0)
+ (if_then_else (CODE (reg:CC cc)
(const_int 0))
(label_ref L2)
(pc)))
@@ -11893,7 +11790,7 @@ avr_reorg_remove_redundant_compare (rtx_insn *insn1)
JUMP_LABEL (jump) = JUMP_LABEL (branch1);
target = XEXP (XEXP (ifelse2, 1), 0);
- cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
+ cond = gen_rtx_fmt_ee (code, VOIDmode, cc_reg_rtx, const0_rtx);
jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
JUMP_LABEL (jump) = JUMP_LABEL (branch2);
@@ -11936,6 +11833,8 @@ avr_reorg (void)
rtx_insn *next = next_real_insn (insn);
rtx pat = PATTERN (next);
+ if (GET_CODE (pat) == PARALLEL)
+ pat = XVECEXP (pat, 0, 0);
pattern = SET_SRC (pattern);
@@ -12119,6 +12018,22 @@ jump_over_one_insn_p (rtx_insn *insn, rtx dest)
&& avr_2word_insn_p (next_active_insn (insn))));
}
+/* Implement TARGET_HARD_REGNO_NREGS. CCmode is four units for historical
+ reasons. If this hook is not defined, TARGET_HARD_REGNO_NREGS
+ reports that CCmode requires four registers.
+ Define this hook to allow CCmode to fit in a single REG_CC. For
+ other modes and regs, return the number of words in mode (i.e whatever
+ the default implementation of the hook returned). */
+
+static unsigned int
+avr_hard_regno_nregs (unsigned int regno, machine_mode mode)
+{
+ if (regno == REG_CC && mode == CCmode)
+ return 1;
+
+ return CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD);
+}
+
/* Implement TARGET_HARD_REGNO_MODE_OK. On the enhanced core, anything
larger than 1 byte must start in even numbered register for "movw" to
@@ -12127,6 +12042,9 @@ jump_over_one_insn_p (rtx_insn *insn, rtx dest)
static bool
avr_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
{
+ if (regno == REG_CC)
+ return mode == CCmode;
+
/* NOTE: 8-bit values must not be disallowed for R28 or R29.
Disallowing QI et al. in these regs might lead to code like
(set (subreg:QI (reg:HI 28) n) ...)
@@ -14575,6 +14493,21 @@ avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
return NULL_TREE;
}
+/* Prepend to CLOBBERS hard registers that are automatically clobbered
+ for an asm. We do this for CC_REGNUM to maintain source compatibility
+ with the original cc0-based compiler. */
+
+static rtx_insn *
+avr_md_asm_adjust (vec<rtx> &/*outputs*/, vec<rtx> &/*inputs*/,
+ vec<machine_mode> & /*input_modes*/,
+ vec<const char *> &/*constraints*/,
+ vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs)
+{
+ clobbers.safe_push (cc_reg_rtx);
+ SET_HARD_REG_BIT (clobbered_regs, REG_CC);
+ return NULL;
+}
+
/* Worker function for `FLOAT_LIB_COMPARE_RETURNS_BOOL'. */
@@ -14669,6 +14602,9 @@ avr_float_lib_compare_returns_bool (machine_mode mode, enum rtx_code)
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage
+#undef TARGET_HARD_REGNO_NREGS
+#define TARGET_HARD_REGNO_NREGS avr_hard_regno_nregs
+
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK avr_hard_regno_mode_ok
#undef TARGET_HARD_REGNO_SCRATCH_OK
@@ -14694,6 +14630,9 @@ avr_float_lib_compare_returns_bool (machine_mode mode, enum rtx_code)
#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
+#undef TARGET_CLASS_MAX_NREGS
+#define TARGET_CLASS_MAX_NREGS avr_class_max_nregs
+
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override
@@ -14772,6 +14711,9 @@ avr_float_lib_compare_returns_bool (machine_mode mode, enum rtx_code)
#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET avr_starting_frame_offset
+#undef TARGET_MD_ASM_ADJUST
+#define TARGET_MD_ASM_ADJUST avr_md_asm_adjust
+
struct gcc_target targetm = TARGET_INITIALIZER;