Diffstat (limited to 'gcc/final.c')
-rw-r--r--  gcc/final.c | 80
1 file changed, 40 insertions(+), 40 deletions(-)
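
For context: every hunk in this patch replaces an open-coded GET_CODE comparison with the corresponding RTL predicate macro from gcc/rtl.h. A minimal sketch of what those macros expand to, inferred from the one-to-one replacements below (the exact definitions live in rtl.h and may be spelled slightly differently):

/* Sketch of the rtl.h predicate macros used by this patch; each one wraps
   the GET_CODE test that the removed lines below open-coded.  */
#define NOTE_P(X)          (GET_CODE (X) == NOTE)
#define BARRIER_P(X)       (GET_CODE (X) == BARRIER)
#define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
#define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
#define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
#define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)

So, for example, "GET_CODE (branch) != JUMP_INSN" becomes "!JUMP_P (branch)" with no change in behavior.
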
diff --git a/gcc/final.c b/gcc/final.c
index 48be292..412d0fa 100644
--- a/gcc/final.c
+++ b/gcc/final.c
@@ -625,7 +625,7 @@ insn_current_reference_address (rtx branch)
seq = NEXT_INSN (PREV_INSN (branch));
seq_uid = INSN_UID (seq);
- if (GET_CODE (branch) != JUMP_INSN)
+ if (!JUMP_P (branch))
/* This can happen for example on the PA; the objective is to know the
offset to address something in front of the start of the function.
Thus, we can treat it like a backward branch.
@@ -678,7 +678,7 @@ compute_alignments (void)
int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
edge e;
- if (GET_CODE (label) != CODE_LABEL
+ if (!LABEL_P (label)
|| probably_never_executed_bb_p (bb))
continue;
max_log = LABEL_ALIGN (label);
@@ -817,7 +817,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
is separated by the former loop start insn from the
NOTE_INSN_LOOP_BEG. */
}
- else if (GET_CODE (insn) == CODE_LABEL)
+ else if (LABEL_P (insn))
{
rtx next;
@@ -839,7 +839,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
/* ADDR_VECs only take room if read-only data goes into the text
section. */
if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
- if (next && GET_CODE (next) == JUMP_INSN)
+ if (next && JUMP_P (next))
{
rtx nextbody = PATTERN (next);
if (GET_CODE (nextbody) == ADDR_VEC
@@ -858,13 +858,13 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
max_log = 0;
max_skip = 0;
}
- else if (GET_CODE (insn) == BARRIER)
+ else if (BARRIER_P (insn))
{
rtx label;
for (label = insn; label && ! INSN_P (label);
label = NEXT_INSN (label))
- if (GET_CODE (label) == CODE_LABEL)
+ if (LABEL_P (label))
{
log = LABEL_ALIGN_AFTER_BARRIER (insn);
if (max_log < log)
@@ -901,7 +901,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
{
int uid = INSN_UID (seq);
int log;
- log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
+ log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
uid_align[uid] = align_tab[0];
if (log)
{
@@ -928,7 +928,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
int min_align;
addr_diff_vec_flags flags;
- if (GET_CODE (insn) != JUMP_INSN
+ if (!JUMP_P (insn)
|| GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
continue;
pat = PATTERN (insn);
@@ -977,7 +977,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
insn_lengths[uid] = 0;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
int log = LABEL_TO_ALIGNMENT (insn);
if (log)
@@ -990,8 +990,8 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
- if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
- || GET_CODE (insn) == CODE_LABEL)
+ if (NOTE_P (insn) || BARRIER_P (insn)
+ || LABEL_P (insn))
continue;
if (INSN_DELETED_P (insn))
continue;
@@ -1082,7 +1082,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
uid = INSN_UID (insn);
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
int log = LABEL_TO_ALIGNMENT (insn);
if (log > insn_current_align)
@@ -1107,7 +1107,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
INSN_ADDRESSES (uid) = insn_current_address;
#ifdef CASE_VECTOR_SHORTEN_MODE
- if (optimize && GET_CODE (insn) == JUMP_INSN
+ if (optimize && JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
{
rtx body = PATTERN (insn);
@@ -1217,7 +1217,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
if (! (varying_length[uid]))
{
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int i;
@@ -1239,7 +1239,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
continue;
}
- if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int i;
@@ -1517,7 +1517,7 @@ final (rtx first, FILE *file, int optimize, int prescan)
{
rtx last = 0;
for (insn = first; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
if (last != 0
#ifdef USE_MAPPED_LOCATION
@@ -1543,7 +1543,7 @@ final (rtx first, FILE *file, int optimize, int prescan)
#ifdef HAVE_cc0
/* If CC tracking across branches is enabled, record the insn which
jumps to each branch only reached from one place. */
- if (optimize && GET_CODE (insn) == JUMP_INSN)
+ if (optimize && JUMP_P (insn))
{
rtx lab = JUMP_LABEL (insn);
if (lab && LABEL_NUSES (lab) == 1)
@@ -1566,7 +1566,7 @@ final (rtx first, FILE *file, int optimize, int prescan)
{
/* This can be triggered by bugs elsewhere in the compiler if
new insns are created after init_insn_lengths is called. */
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
insn_current_address = -1;
else
abort ();
@@ -1641,10 +1641,10 @@ scan_ahead_for_unlikely_executed_note (rtx insn)
for (temp = insn; temp; temp = NEXT_INSN (temp))
{
- if (GET_CODE (temp) == NOTE
+ if (NOTE_P (temp)
&& NOTE_LINE_NUMBER (temp) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
return true;
- if (GET_CODE (temp) == NOTE
+ if (NOTE_P (temp)
&& NOTE_LINE_NUMBER (temp) == NOTE_INSN_BASIC_BLOCK)
{
bb_note_count++;
@@ -1902,10 +1902,10 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
insn, and that branch is the only way to reach this label,
set the condition codes based on the branch and its
predecessor. */
- if (barrier && GET_CODE (barrier) == BARRIER
- && jump && GET_CODE (jump) == JUMP_INSN
+ if (barrier && BARRIER_P (barrier)
+ && jump && JUMP_P (jump)
&& (prev = prev_nonnote_insn (jump))
- && GET_CODE (prev) == INSN)
+ && NONJUMP_INSN_P (prev))
{
NOTICE_UPDATE_CC (PATTERN (prev), prev);
NOTICE_UPDATE_CC (PATTERN (jump), jump);
@@ -1925,7 +1925,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
if (flag_reorder_blocks_and_partition)
{
rtx tmp_table, tmp_label;
- if (GET_CODE (insn) == CODE_LABEL
+ if (LABEL_P (insn)
&& tablejump_p (NEXT_INSN (insn), &tmp_label, &tmp_table))
{
/* Do nothing; Do NOT change the current section. */
@@ -1945,7 +1945,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
app_on = 0;
}
if (NEXT_INSN (insn) != 0
- && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
+ && JUMP_P (NEXT_INSN (insn)))
{
rtx nextbody = PATTERN (NEXT_INSN (insn));
@@ -2214,7 +2214,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
called function. Hence we don't preserve any CC-setting
actions in these insns and the CC must be marked as being
clobbered by the function. */
- if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
+ if (CALL_P (XVECEXP (body, 0, 0)))
{
CC_STATUS_INIT;
}
@@ -2279,7 +2279,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
will cause an improper number of delay insns to be written. */
if (final_sequence == 0
&& prescan >= 0
- && GET_CODE (insn) == INSN && GET_CODE (body) == SET
+ && NONJUMP_INSN_P (insn) && GET_CODE (body) == SET
&& REG_P (SET_SRC (body))
&& REG_P (SET_DEST (body))
&& REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
@@ -2293,7 +2293,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
do straightforwardly if the cc's were set up normally. */
if (cc_status.flags != 0
- && GET_CODE (insn) == JUMP_INSN
+ && JUMP_P (insn)
&& GET_CODE (body) == SET
&& SET_DEST (body) == pc_rtx
&& GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
@@ -2342,7 +2342,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
{
rtx cond_rtx, then_rtx, else_rtx;
- if (GET_CODE (insn) != JUMP_INSN
+ if (!JUMP_P (insn)
&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
{
cond_rtx = XEXP (SET_SRC (set), 0);
@@ -2470,7 +2470,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
current_output_insn = debug_insn = insn;
#if defined (DWARF2_UNWIND_INFO)
- if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
+ if (CALL_P (insn) && dwarf2out_do_frame ())
dwarf2out_frame_debug (insn);
#endif
@@ -2495,7 +2495,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
prev != last_ignored_compare;
prev = PREV_INSN (prev))
{
- if (GET_CODE (prev) == NOTE)
+ if (NOTE_P (prev))
delete_insn (prev); /* Use delete_note. */
}
@@ -2536,7 +2536,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
the unwind info. We've already done this for delay slots
and call instructions. */
#if defined (DWARF2_UNWIND_INFO)
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
#if !defined (HAVE_prologue)
&& !ACCUMULATE_OUTGOING_ARGS
#endif
@@ -3196,8 +3196,8 @@ output_asm_label (rtx x)
if (GET_CODE (x) == LABEL_REF)
x = XEXP (x, 0);
- if (GET_CODE (x) == CODE_LABEL
- || (GET_CODE (x) == NOTE
+ if (LABEL_P (x)
+ || (NOTE_P (x)
&& NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
else
@@ -3690,12 +3690,12 @@ leaf_function_p (void)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& ! SIBLING_CALL_P (insn))
return 0;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE
- && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
+ && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
&& ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
return 0;
}
@@ -3705,12 +3705,12 @@ leaf_function_p (void)
{
insn = XEXP (link, 0);
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& ! SIBLING_CALL_P (insn))
return 0;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE
- && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
+ && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
&& ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
return 0;
}