aboutsummaryrefslogtreecommitdiff
path: root/gcc/config
diff options
context:
space:
mode:
authorSteven Bosscher <steven@gcc.gnu.org>2013-03-23 12:11:10 +0000
committerSteven Bosscher <steven@gcc.gnu.org>2013-03-23 12:11:10 +0000
commitb64925dc4b75d48fe36c0fcd41fd02151a5ffa55 (patch)
tree17531e80a4ee959e96e3b55b923dbe168f1daa80 /gcc/config
parentb9aaf52e25a78e5b150ecc7a8b5226392e569b5e (diff)
downloadgcc-b64925dc4b75d48fe36c0fcd41fd02151a5ffa55.zip
gcc-b64925dc4b75d48fe36c0fcd41fd02151a5ffa55.tar.gz
gcc-b64925dc4b75d48fe36c0fcd41fd02151a5ffa55.tar.bz2
avr.c, [...]: Where applicable...
* config/avr/avr.c, config/bfin/bfin.c, config/c6x/c6x.c, config/epiphany/epiphany.c, config/frv/frv.c, config/ia64/ia64.c, config/iq2000/iq2000.c, config/mcore/mcore.c, config/mep/mep.c, config/mmix/mmix.c, config/pa/pa.c, config/rs6000/rs6000.c, config/s390/s390.c, config/sparc/sparc.c, config/spu/spu.c, config/stormy16/stormy16.c, config/v850/v850.c, config/xtensa/xtensa.c, dwarf2out.c, hw-doloop.c, resource.c, rtl.h: Where applicable, use the predicates NOTE_P, NONJUMP_INSN_P, JUMP_P, CALL_P, LABEL_P, and BARRIER_P instead of GET_CODE. From-SVN: r197005
Diffstat (limited to 'gcc/config')
-rw-r--r--gcc/config/avr/avr.c4
-rw-r--r--gcc/config/bfin/bfin.c6
-rw-r--r--gcc/config/c6x/c6x.c2
-rw-r--r--gcc/config/epiphany/epiphany.c2
-rw-r--r--gcc/config/frv/frv.c14
-rw-r--r--gcc/config/ia64/ia64.c45
-rw-r--r--gcc/config/iq2000/iq2000.c15
-rw-r--r--gcc/config/mcore/mcore.c22
-rw-r--r--gcc/config/mep/mep.c26
-rw-r--r--gcc/config/mmix/mmix.c2
-rw-r--r--gcc/config/pa/pa.c73
-rw-r--r--gcc/config/rs6000/rs6000.c11
-rw-r--r--gcc/config/s390/s390.c44
-rw-r--r--gcc/config/sparc/sparc.c10
-rw-r--r--gcc/config/spu/spu.c6
-rw-r--r--gcc/config/stormy16/stormy16.c12
-rw-r--r--gcc/config/v850/v850.c6
-rw-r--r--gcc/config/xtensa/xtensa.c2
18 files changed, 142 insertions, 160 deletions
diff --git a/gcc/config/avr/avr.c b/gcc/config/avr/avr.c
index c916d6b..3f2b54a 100644
--- a/gcc/config/avr/avr.c
+++ b/gcc/config/avr/avr.c
@@ -7629,9 +7629,9 @@ _reg_unused_after (rtx insn, rtx reg)
rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
rtx set = single_set (this_insn);
- if (GET_CODE (this_insn) == CALL_INSN)
+ if (CALL_P (this_insn))
code = CALL_INSN;
- else if (GET_CODE (this_insn) == JUMP_INSN)
+ else if (JUMP_P (this_insn))
{
if (INSN_ANNULLED_BRANCH_P (this_insn))
return 0;
diff --git a/gcc/config/bfin/bfin.c b/gcc/config/bfin/bfin.c
index 97c1d21..a2d22c1 100644
--- a/gcc/config/bfin/bfin.c
+++ b/gcc/config/bfin/bfin.c
@@ -3887,8 +3887,7 @@ gen_one_bundle (rtx slot[3])
rtx t = NEXT_INSN (slot[0]);
while (t != slot[1])
{
- if (GET_CODE (t) != NOTE
- || NOTE_KIND (t) != NOTE_INSN_DELETED)
+ if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
return false;
t = NEXT_INSN (t);
}
@@ -3898,8 +3897,7 @@ gen_one_bundle (rtx slot[3])
rtx t = NEXT_INSN (slot[1]);
while (t != slot[2])
{
- if (GET_CODE (t) != NOTE
- || NOTE_KIND (t) != NOTE_INSN_DELETED)
+ if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
return false;
t = NEXT_INSN (t);
}
diff --git a/gcc/config/c6x/c6x.c b/gcc/config/c6x/c6x.c
index e0db8b12..f66ad55 100644
--- a/gcc/config/c6x/c6x.c
+++ b/gcc/config/c6x/c6x.c
@@ -4848,7 +4848,7 @@ reorg_split_calls (rtx *call_labels)
{
unsigned int reservation_mask = 0;
rtx insn = get_insns ();
- gcc_assert (GET_CODE (insn) == NOTE);
+ gcc_assert (NOTE_P (insn));
insn = next_real_insn (insn);
while (insn)
{
diff --git a/gcc/config/epiphany/epiphany.c b/gcc/config/epiphany/epiphany.c
index 782dc7b..5520a63 100644
--- a/gcc/config/epiphany/epiphany.c
+++ b/gcc/config/epiphany/epiphany.c
@@ -2386,7 +2386,7 @@ epiphany_mode_after (int entity, int last_mode, rtx insn)
calls. */
if (entity == EPIPHANY_MSW_ENTITY_AND || entity == EPIPHANY_MSW_ENTITY_OR)
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return 0;
return last_mode;
}
diff --git a/gcc/config/frv/frv.c b/gcc/config/frv/frv.c
index 8fe4655..fd5349f 100644
--- a/gcc/config/frv/frv.c
+++ b/gcc/config/frv/frv.c
@@ -1408,7 +1408,7 @@ frv_function_contains_far_jump (void)
{
rtx insn = get_insns ();
while (insn != NULL
- && !(GET_CODE (insn) == JUMP_INSN
+ && !(JUMP_P (insn)
/* Ignore tablejump patterns. */
&& GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
@@ -1446,7 +1446,7 @@ frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
simply emit a different assembly directive because bralr and jmpl
execute in different units. */
for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx pattern = PATTERN (insn);
if (GET_CODE (pattern) == PARALLEL
@@ -2649,7 +2649,7 @@ frv_print_operand_jump_hint (rtx insn)
HOST_WIDE_INT prob = -1;
enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
- gcc_assert (GET_CODE (insn) == JUMP_INSN);
+ gcc_assert (JUMP_P (insn));
/* Assume any non-conditional jump is likely. */
if (! any_condjump_p (insn))
@@ -7387,7 +7387,7 @@ frv_pack_insn_p (rtx insn)
- There's no point putting a call in its own packet unless
we have to. */
if (frv_packet.num_insns > 0
- && GET_CODE (insn) == INSN
+ && NONJUMP_INSN_P (insn)
&& GET_MODE (insn) == TImode
&& GET_CODE (PATTERN (insn)) != COND_EXEC)
return false;
@@ -7430,7 +7430,7 @@ frv_insert_nop_in_packet (rtx insn)
packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
last = frv_packet.insns[frv_packet.num_insns - 1];
- if (GET_CODE (last) != INSN)
+ if (! NONJUMP_INSN_P (last))
{
insn = emit_insn_before (PATTERN (insn), last);
frv_packet.insns[frv_packet.num_insns - 1] = insn;
@@ -7492,7 +7492,7 @@ frv_for_each_packet (void (*handle_packet) (void))
default:
/* Calls mustn't be packed on a TOMCAT. */
- if (GET_CODE (insn) == CALL_INSN && frv_cpu_type == FRV_CPU_TOMCAT)
+ if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT)
frv_finish_packet (handle_packet);
/* Since the last instruction in a packet determines the EH
@@ -7913,7 +7913,7 @@ frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
CLEAR_HARD_REG_SET (used_regs);
for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
/* We can't predict what a call will do to volatile memory. */
memset (next_io, 0, sizeof (struct frv_io));
diff --git a/gcc/config/ia64/ia64.c b/gcc/config/ia64/ia64.c
index 28e4483..0e328ff 100644
--- a/gcc/config/ia64/ia64.c
+++ b/gcc/config/ia64/ia64.c
@@ -5470,7 +5470,7 @@ ia64_print_operand (FILE * file, rtx x, int code)
else
which = ".sptk";
}
- else if (GET_CODE (current_output_insn) == CALL_INSN)
+ else if (CALL_P (current_output_insn))
which = ".sptk";
else
which = ".dptk";
@@ -6811,8 +6811,7 @@ group_barrier_needed (rtx insn)
memset (rws_insn, 0, sizeof (rws_insn));
/* Don't bundle a call following another call. */
- if ((pat = prev_active_insn (insn))
- && GET_CODE (pat) == CALL_INSN)
+ if ((pat = prev_active_insn (insn)) && CALL_P (pat))
{
need_barrier = 1;
break;
@@ -6826,8 +6825,7 @@ group_barrier_needed (rtx insn)
flags.is_branch = 1;
/* Don't bundle a jump following a call. */
- if ((pat = prev_active_insn (insn))
- && GET_CODE (pat) == CALL_INSN)
+ if ((pat = prev_active_insn (insn)) && CALL_P (pat))
{
need_barrier = 1;
break;
@@ -6929,20 +6927,20 @@ emit_insn_group_barriers (FILE *dump)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
if (insns_since_last_label)
last_label = insn;
insns_since_last_label = 0;
}
- else if (GET_CODE (insn) == NOTE
+ else if (NOTE_P (insn)
&& NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK)
{
if (insns_since_last_label)
last_label = insn;
insns_since_last_label = 0;
}
- else if (GET_CODE (insn) == INSN
+ else if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
&& XINT (PATTERN (insn), 1) == UNSPECV_INSN_GROUP_BARRIER)
{
@@ -6983,13 +6981,13 @@ emit_all_insn_group_barriers (FILE *dump ATTRIBUTE_UNUSED)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
rtx last = prev_active_insn (insn);
if (! last)
continue;
- if (GET_CODE (last) == JUMP_INSN
+ if (JUMP_P (last)
&& GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
last = prev_active_insn (last);
if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
@@ -7487,7 +7485,7 @@ ia64_variable_issue (FILE *dump ATTRIBUTE_UNUSED,
int needed = group_barrier_needed (insn);
gcc_assert (!needed);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
init_insn_group_barriers ();
stops_p [INSN_UID (insn)] = stop_before_p;
stop_before_p = 0;
@@ -7576,7 +7574,7 @@ ia64_dfa_new_cycle (FILE *dump, int verbose, rtx insn, int last_clock,
&& last_scheduled_insn
&& scheduled_good_insn (last_scheduled_insn))))
|| (last_scheduled_insn
- && (GET_CODE (last_scheduled_insn) == CALL_INSN
+ && (CALL_P (last_scheduled_insn)
|| unknown_for_bundling_p (last_scheduled_insn))))
{
init_insn_group_barriers ();
@@ -7594,7 +7592,7 @@ ia64_dfa_new_cycle (FILE *dump, int verbose, rtx insn, int last_clock,
state_transition (curr_state, dfa_stop_insn);
if (TARGET_EARLY_STOP_BITS)
*sort_p = (last_scheduled_insn == NULL_RTX
- || GET_CODE (last_scheduled_insn) != CALL_INSN);
+ || ! CALL_P (last_scheduled_insn));
else
*sort_p = 0;
return 1;
@@ -8936,9 +8934,9 @@ ia64_add_bundle_selector_before (int template0, rtx insn)
{
do
insn = next_active_insn (insn);
- while (GET_CODE (insn) == INSN
+ while (NONJUMP_INSN_P (insn)
&& get_attr_empty (insn) == EMPTY_YES);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
else if (note)
{
@@ -9372,13 +9370,13 @@ final_emit_insn_group_barriers (FILE *dump ATTRIBUTE_UNUSED)
insn != current_sched_info->next_tail;
insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
rtx last = prev_active_insn (insn);
if (! last)
continue;
- if (GET_CODE (last) == JUMP_INSN
+ if (JUMP_P (last)
&& GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
last = prev_active_insn (last);
if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
@@ -9445,8 +9443,7 @@ final_emit_insn_group_barriers (FILE *dump ATTRIBUTE_UNUSED)
else if (recog_memoized (insn) >= 0
&& important_for_bundling_p (insn))
seen_good_insn = 1;
- need_barrier_p = (GET_CODE (insn) == CALL_INSN
- || unknown_for_bundling_p (insn));
+ need_barrier_p = (CALL_P (insn) || unknown_for_bundling_p (insn));
}
}
}
@@ -9590,7 +9587,7 @@ emit_predicate_relation_info (void)
rtx head = BB_HEAD (bb);
/* We only need such notes at code labels. */
- if (GET_CODE (head) != CODE_LABEL)
+ if (! LABEL_P (head))
continue;
if (NOTE_INSN_BASIC_BLOCK_P (NEXT_INSN (head)))
head = NEXT_INSN (head);
@@ -9618,7 +9615,7 @@ emit_predicate_relation_info (void)
while (1)
{
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& GET_CODE (PATTERN (insn)) == COND_EXEC
&& find_reg_note (insn, REG_NORETURN, NULL_RTX))
{
@@ -9766,7 +9763,7 @@ ia64_reorg (void)
if (insn)
{
/* Skip over insns that expand to nothing. */
- while (GET_CODE (insn) == INSN
+ while (NONJUMP_INSN_P (insn)
&& get_attr_empty (insn) == EMPTY_YES)
{
if (GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
@@ -9774,7 +9771,7 @@ ia64_reorg (void)
saw_stop = 1;
insn = prev_active_insn (insn);
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
if (! saw_stop)
emit_insn (gen_insn_group_barrier (GEN_INT (3)));
@@ -10184,7 +10181,7 @@ ia64_asm_unwind_emit (FILE *asm_out_file, rtx insn)
}
}
- if (GET_CODE (insn) == NOTE || ! RTX_FRAME_RELATED_P (insn))
+ if (NOTE_P (insn) || ! RTX_FRAME_RELATED_P (insn))
return;
/* Look for the ALLOC insn. */
diff --git a/gcc/config/iq2000/iq2000.c b/gcc/config/iq2000/iq2000.c
index c293398..7e19366 100644
--- a/gcc/config/iq2000/iq2000.c
+++ b/gcc/config/iq2000/iq2000.c
@@ -381,8 +381,7 @@ iq2000_fill_delay_slot (const char *ret, enum delay_type type, rtx operands[],
/* Make sure that we don't put nop's after labels. */
next_insn = NEXT_INSN (cur_insn);
while (next_insn != 0
- && (GET_CODE (next_insn) == NOTE
- || GET_CODE (next_insn) == CODE_LABEL))
+ && (NOTE_P (next_insn) || LABEL_P (next_insn)))
next_insn = NEXT_INSN (next_insn);
dslots_load_total += num_nops;
@@ -391,7 +390,7 @@ iq2000_fill_delay_slot (const char *ret, enum delay_type type, rtx operands[],
|| operands == 0
|| cur_insn == 0
|| next_insn == 0
- || GET_CODE (next_insn) == CODE_LABEL
+ || LABEL_P (next_insn)
|| (set_reg = operands[0]) == 0)
{
dslots_number_nops = 0;
@@ -1533,8 +1532,8 @@ final_prescan_insn (rtx insn, rtx opvec[] ATTRIBUTE_UNUSED,
iq2000_load_reg4 = 0;
}
- if ( (GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN
+ if ( (JUMP_P (insn)
+ || CALL_P (insn)
|| (GET_CODE (PATTERN (insn)) == RETURN))
&& NEXT_INSN (PREV_INSN (insn)) == insn)
{
@@ -1544,7 +1543,7 @@ final_prescan_insn (rtx insn, rtx opvec[] ATTRIBUTE_UNUSED,
}
if (TARGET_STATS
- && (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CALL_INSN))
+ && (JUMP_P (insn) || CALL_P (insn)))
dslots_jump_total ++;
}
@@ -2285,8 +2284,8 @@ iq2000_adjust_insn_length (rtx insn, int length)
/* A unconditional jump has an unfilled delay slot if it is not part
of a sequence. A conditional jump normally has a delay slot. */
if (simplejump_p (insn)
- || ( (GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN)))
+ || ( (JUMP_P (insn)
+ || CALL_P (insn))))
length += 4;
return length;
diff --git a/gcc/config/mcore/mcore.c b/gcc/config/mcore/mcore.c
index e730362..6550b69 100644
--- a/gcc/config/mcore/mcore.c
+++ b/gcc/config/mcore/mcore.c
@@ -914,10 +914,10 @@ mcore_is_dead (rtx first, rtx reg)
to assume that it is live. */
for (insn = NEXT_INSN (first); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return 0; /* We lose track, assume it is alive. */
- else if (GET_CODE(insn) == CALL_INSN)
+ else if (CALL_P (insn))
{
/* Call's might use it for target or register parms. */
if (reg_referenced_p (reg, PATTERN (insn))
@@ -926,7 +926,7 @@ mcore_is_dead (rtx first, rtx reg)
else if (dead_or_set_p (insn, reg))
return 1;
}
- else if (GET_CODE (insn) == INSN)
+ else if (NONJUMP_INSN_P (insn))
{
if (reg_referenced_p (reg, PATTERN (insn)))
return 0;
@@ -2254,7 +2254,7 @@ is_cond_candidate (rtx insn)
changed into a conditional. Only bother with SImode items. If
we wanted to be a little more aggressive, we could also do other
modes such as DImode with reg-reg move or load 0. */
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
rtx pat = PATTERN (insn);
rtx src, dst;
@@ -2305,9 +2305,9 @@ is_cond_candidate (rtx insn)
*/
}
- else if (GET_CODE (insn) == JUMP_INSN &&
- GET_CODE (PATTERN (insn)) == SET &&
- GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
+ else if (JUMP_P (insn)
+ && GET_CODE (PATTERN (insn)) == SET
+ && GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
return COND_BRANCH_INSN;
return COND_NO;
@@ -2328,7 +2328,7 @@ emit_new_cond_insn (rtx insn, int cond)
pat = PATTERN (insn);
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
dst = SET_DEST (pat);
src = SET_SRC (pat);
@@ -2449,9 +2449,9 @@ conditionalize_block (rtx first)
/* Check that the first insn is a candidate conditional jump. This is
the one that we'll eliminate. If not, advance to the next insn to
try. */
- if (GET_CODE (first) != JUMP_INSN ||
- GET_CODE (PATTERN (first)) != SET ||
- GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
+ if (! JUMP_P (first)
+ || GET_CODE (PATTERN (first)) != SET
+ || GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
return NEXT_INSN (first);
/* Extract some information we need. */
diff --git a/gcc/config/mep/mep.c b/gcc/config/mep/mep.c
index c190d31..60054f9 100644
--- a/gcc/config/mep/mep.c
+++ b/gcc/config/mep/mep.c
@@ -4882,7 +4882,7 @@ mep_reorg_regmove (rtx insns)
if (dump_file)
for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
before++;
/* We're looking for (set r2 r1) moves where r1 dies, followed by a
@@ -4896,7 +4896,7 @@ mep_reorg_regmove (rtx insns)
for (insn = insns; insn; insn = next)
{
next = next_nonnote_nondebug_insn (insn);
- if (GET_CODE (insn) != INSN)
+ if (! NONJUMP_INSN_P (insn))
continue;
pat = PATTERN (insn);
@@ -4912,7 +4912,7 @@ mep_reorg_regmove (rtx insns)
if (dump_file)
fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
- while (follow && GET_CODE (follow) == INSN
+ while (follow && NONJUMP_INSN_P (follow)
&& GET_CODE (PATTERN (follow)) == SET
&& !dead_or_set_p (follow, SET_SRC (pat))
&& !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
@@ -4925,7 +4925,7 @@ mep_reorg_regmove (rtx insns)
if (dump_file)
fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
- if (follow && GET_CODE (follow) == INSN
+ if (follow && NONJUMP_INSN_P (follow)
&& GET_CODE (PATTERN (follow)) == SET
&& find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
{
@@ -5523,8 +5523,7 @@ mep_reorg_erepeat (rtx insns)
count = simplejump_p (insn) ? 0 : 1;
for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
{
- if (GET_CODE (prev) == CALL_INSN
- || BARRIER_P (prev))
+ if (CALL_P (prev) || BARRIER_P (prev))
break;
if (prev == JUMP_LABEL (insn))
@@ -5543,10 +5542,10 @@ mep_reorg_erepeat (rtx insns)
*after* the label. */
rtx barrier;
for (barrier = PREV_INSN (prev);
- barrier && GET_CODE (barrier) == NOTE;
+ barrier && NOTE_P (barrier);
barrier = PREV_INSN (barrier))
;
- if (barrier && GET_CODE (barrier) != BARRIER)
+ if (barrier && ! BARRIER_P (barrier))
break;
}
else
@@ -5590,10 +5589,9 @@ mep_reorg_erepeat (rtx insns)
if (LABEL_NUSES (prev) == 1)
{
for (user = PREV_INSN (prev);
- user && (INSN_P (user) || GET_CODE (user) == NOTE);
+ user && (INSN_P (user) || NOTE_P (user));
user = PREV_INSN (user))
- if (GET_CODE (user) == JUMP_INSN
- && JUMP_LABEL (user) == prev)
+ if (JUMP_P (user) && JUMP_LABEL (user) == prev)
{
safe = INSN_UID (user);
break;
@@ -5631,8 +5629,8 @@ mep_jmp_return_reorg (rtx insns)
/* Find the fist real insn the jump jumps to. */
label = ret = JUMP_LABEL (insn);
while (ret
- && (GET_CODE (ret) == NOTE
- || GET_CODE (ret) == CODE_LABEL
+ && (NOTE_P (ret)
+ || LABEL_P (ret)
|| GET_CODE (PATTERN (ret)) == USE))
ret = NEXT_INSN (ret);
@@ -7018,7 +7016,7 @@ mep_bundle_insns (rtx insns)
if (recog_memoized (insn) >= 0
&& get_attr_slot (insn) == SLOT_COP)
{
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
|| ! last
|| recog_memoized (last) < 0
|| get_attr_slot (last) != SLOT_CORE
diff --git a/gcc/config/mmix/mmix.c b/gcc/config/mmix/mmix.c
index 1a81d4c..1af09e5 100644
--- a/gcc/config/mmix/mmix.c
+++ b/gcc/config/mmix/mmix.c
@@ -1728,7 +1728,7 @@ mmix_print_operand (FILE *stream, rtx x, int code)
if (CONSTANT_P (modified_x)
/* Strangely enough, this is not included in CONSTANT_P.
FIXME: Ask/check about sanity here. */
- || GET_CODE (modified_x) == CODE_LABEL)
+ || LABEL_P (modified_x))
{
output_addr_const (stream, modified_x);
return;
diff --git a/gcc/config/pa/pa.c b/gcc/config/pa/pa.c
index 0d39483..1cad695 100644
--- a/gcc/config/pa/pa.c
+++ b/gcc/config/pa/pa.c
@@ -3320,7 +3320,7 @@ remove_useless_addtr_insns (int check_notes)
rtx tmp;
/* Ignore anything that isn't an INSN or a JUMP_INSN. */
- if (GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
+ if (! NONJUMP_INSN_P (insn) && ! JUMP_P (insn))
continue;
tmp = PATTERN (insn);
@@ -3359,7 +3359,7 @@ remove_useless_addtr_insns (int check_notes)
rtx tmp, next;
/* Ignore anything that isn't an INSN. */
- if (GET_CODE (insn) != INSN)
+ if (! NONJUMP_INSN_P (insn))
continue;
tmp = PATTERN (insn);
@@ -3382,13 +3382,11 @@ remove_useless_addtr_insns (int check_notes)
while (next)
{
/* Jumps, calls and labels stop our search. */
- if (GET_CODE (next) == JUMP_INSN
- || GET_CODE (next) == CALL_INSN
- || GET_CODE (next) == CODE_LABEL)
+ if (JUMP_P (next) || CALL_P (next) || LABEL_P (next))
break;
/* As does another fcmp insn. */
- if (GET_CODE (next) == INSN
+ if (NONJUMP_INSN_P (next)
&& GET_CODE (PATTERN (next)) == SET
&& GET_CODE (SET_DEST (PATTERN (next))) == REG
&& REGNO (SET_DEST (PATTERN (next))) == 0)
@@ -3398,8 +3396,7 @@ remove_useless_addtr_insns (int check_notes)
}
/* Is NEXT_INSN a branch? */
- if (next
- && GET_CODE (next) == JUMP_INSN)
+ if (next && JUMP_P (next))
{
rtx pattern = PATTERN (next);
@@ -4160,16 +4157,16 @@ pa_output_function_epilogue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
always point to a valid instruction in the current function. */
/* Get the last real insn. */
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
insn = prev_real_insn (insn);
/* If it is a sequence, then look inside. */
- if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
/* If insn is a CALL_INSN, then it must be a call to a volatile
function (otherwise there would be epilogue insns). */
- if (insn && GET_CODE (insn) == CALL_INSN)
+ if (insn && CALL_P (insn))
{
fputs ("\tnop\n", file);
last_address += 4;
@@ -4930,12 +4927,12 @@ pa_adjust_insn_length (rtx insn, int length)
/* Jumps inside switch tables which have unfilled delay slots need
adjustment. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& GET_CODE (pat) == PARALLEL
&& get_attr_type (insn) == TYPE_BTABLE_BRANCH)
length += 4;
/* Block move pattern. */
- else if (GET_CODE (insn) == INSN
+ else if (NONJUMP_INSN_P (insn)
&& GET_CODE (pat) == PARALLEL
&& GET_CODE (XVECEXP (pat, 0, 0)) == SET
&& GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
@@ -4944,7 +4941,7 @@ pa_adjust_insn_length (rtx insn, int length)
&& GET_MODE (XEXP (XVECEXP (pat, 0, 0), 1)) == BLKmode)
length += compute_movmem_length (insn) - 4;
/* Block clear pattern. */
- else if (GET_CODE (insn) == INSN
+ else if (NONJUMP_INSN_P (insn)
&& GET_CODE (pat) == PARALLEL
&& GET_CODE (XVECEXP (pat, 0, 0)) == SET
&& GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
@@ -4952,7 +4949,7 @@ pa_adjust_insn_length (rtx insn, int length)
&& GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode)
length += compute_clrmem_length (insn) - 4;
/* Conditional branch with an unfilled delay slot. */
- else if (GET_CODE (insn) == JUMP_INSN && ! simplejump_p (insn))
+ else if (JUMP_P (insn) && ! simplejump_p (insn))
{
/* Adjust a short backwards conditional with an unfilled delay slot. */
if (GET_CODE (pat) == SET
@@ -5846,7 +5843,7 @@ pa_output_arg_descriptor (rtx call_insn)
return;
}
- gcc_assert (GET_CODE (call_insn) == CALL_INSN);
+ gcc_assert (CALL_P (call_insn));
for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
link; link = XEXP (link, 1))
{
@@ -6641,7 +6638,7 @@ pa_output_lbranch (rtx dest, rtx insn, int xdelay)
if (xdelay && dbr_sequence_length () != 0)
{
/* We can't handle a jump in the delay slot. */
- gcc_assert (GET_CODE (NEXT_INSN (insn)) != JUMP_INSN);
+ gcc_assert (! JUMP_P (NEXT_INSN (insn)));
final_scan_insn (NEXT_INSN (insn), asm_out_file,
optimize, 0, NULL);
@@ -7650,7 +7647,7 @@ pa_output_millicode_call (rtx insn, rtx call_dest)
output_asm_insn ("nop", xoperands);
/* We are done if there isn't a jump in the delay slot. */
- if (seq_length == 0 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
+ if (seq_length == 0 || ! JUMP_P (NEXT_INSN (insn)))
return "";
/* This call has an unconditional jump in its delay slot. */
@@ -7708,7 +7705,7 @@ pa_attr_length_call (rtx insn, int sibcall)
rtx pat = PATTERN (insn);
unsigned long distance = -1;
- gcc_assert (GET_CODE (insn) == CALL_INSN);
+ gcc_assert (CALL_P (insn));
if (INSN_ADDRESSES_SET_P ())
{
@@ -7822,7 +7819,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibcall)
delay slot. We can't do this in a sibcall as we don't
have a second call-clobbered scratch register available. */
if (seq_length != 0
- && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+ && ! JUMP_P (NEXT_INSN (insn))
&& !sibcall)
{
final_scan_insn (NEXT_INSN (insn), asm_out_file,
@@ -7866,7 +7863,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibcall)
indirect_call = 1;
if (seq_length != 0
- && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+ && ! JUMP_P (NEXT_INSN (insn))
&& !sibcall
&& (!TARGET_PA_20
|| indirect_call
@@ -8032,7 +8029,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibcall)
/* We are done if there isn't a jump in the delay slot. */
if (seq_length == 0
|| delay_insn_deleted
- || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
+ || ! JUMP_P (NEXT_INSN (insn)))
return "";
/* A sibcall should never have a branch in the delay slot. */
@@ -8826,12 +8823,12 @@ int
pa_jump_in_call_delay (rtx insn)
{
- if (GET_CODE (insn) != JUMP_INSN)
+ if (! JUMP_P (insn))
return 0;
if (PREV_INSN (insn)
&& PREV_INSN (PREV_INSN (insn))
- && GET_CODE (next_real_insn (PREV_INSN (PREV_INSN (insn)))) == INSN)
+ && NONJUMP_INSN_P (next_real_insn (PREV_INSN (PREV_INSN (insn)))))
{
rtx test_insn = next_real_insn (PREV_INSN (PREV_INSN (insn)));
@@ -8928,14 +8925,14 @@ pa_following_call (rtx insn)
/* Find the previous real insn, skipping NOTEs. */
insn = PREV_INSN (insn);
- while (insn && GET_CODE (insn) == NOTE)
+ while (insn && NOTE_P (insn))
insn = PREV_INSN (insn);
/* Check for CALL_INSNs and millicode calls. */
if (insn
- && ((GET_CODE (insn) == CALL_INSN
+ && ((CALL_P (insn)
&& get_attr_type (insn) != TYPE_DYNCALL)
- || (GET_CODE (insn) == INSN
+ || (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != SEQUENCE
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER
@@ -9000,7 +8997,7 @@ pa_reorg (void)
unsigned int length, i;
/* Find an ADDR_VEC or ADDR_DIFF_VEC insn to explode. */
- if (GET_CODE (insn) != JUMP_INSN
+ if (! JUMP_P (insn)
|| (GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
continue;
@@ -9059,7 +9056,7 @@ pa_reorg (void)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
/* Find an ADDR_VEC insn. */
- if (GET_CODE (insn) != JUMP_INSN
+ if (! JUMP_P (insn)
|| (GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
continue;
@@ -9140,9 +9137,7 @@ pa_combine_instructions (void)
/* We only care about INSNs, JUMP_INSNs, and CALL_INSNs.
Also ignore any special USE insns. */
- if ((GET_CODE (anchor) != INSN
- && GET_CODE (anchor) != JUMP_INSN
- && GET_CODE (anchor) != CALL_INSN)
+ if ((! NONJUMP_INSN_P (anchor) && ! JUMP_P (anchor) && ! CALL_P (anchor))
|| GET_CODE (PATTERN (anchor)) == USE
|| GET_CODE (PATTERN (anchor)) == CLOBBER
|| GET_CODE (PATTERN (anchor)) == ADDR_VEC
@@ -9162,14 +9157,14 @@ pa_combine_instructions (void)
floater;
floater = PREV_INSN (floater))
{
- if (GET_CODE (floater) == NOTE
- || (GET_CODE (floater) == INSN
+ if (NOTE_P (floater)
+ || (NONJUMP_INSN_P (floater)
&& (GET_CODE (PATTERN (floater)) == USE
|| GET_CODE (PATTERN (floater)) == CLOBBER)))
continue;
/* Anything except a regular INSN will stop our search. */
- if (GET_CODE (floater) != INSN
+ if (! NONJUMP_INSN_P (floater)
|| GET_CODE (PATTERN (floater)) == ADDR_VEC
|| GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
{
@@ -9223,15 +9218,15 @@ pa_combine_instructions (void)
{
for (floater = anchor; floater; floater = NEXT_INSN (floater))
{
- if (GET_CODE (floater) == NOTE
- || (GET_CODE (floater) == INSN
+ if (NOTE_P (floater)
+ || (NONJUMP_INSN_P (floater)
&& (GET_CODE (PATTERN (floater)) == USE
|| GET_CODE (PATTERN (floater)) == CLOBBER)))
continue;
/* Anything except a regular INSN will stop our search. */
- if (GET_CODE (floater) != INSN
+ if (! NONJUMP_INSN_P (floater)
|| GET_CODE (PATTERN (floater)) == ADDR_VEC
|| GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
{
@@ -9386,7 +9381,7 @@ pa_can_combine_p (rtx new_rtx, rtx anchor, rtx floater, int reversed, rtx dest,
int
pa_insn_refs_are_delayed (rtx insn)
{
- return ((GET_CODE (insn) == INSN
+ return ((NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != SEQUENCE
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER
diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c
index 921ff4b..0fe45d8 100644
--- a/gcc/config/rs6000/rs6000.c
+++ b/gcc/config/rs6000/rs6000.c
@@ -17843,9 +17843,8 @@ compute_save_world_info (rs6000_stack_t *info_ptr)
if (WORLD_SAVE_P (info_ptr))
{
rtx insn;
- for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
- if ( GET_CODE (insn) == CALL_INSN
- && SIBLING_CALL_P (insn))
+ for (insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
+ if (CALL_P (insn) && SIBLING_CALL_P (insn))
{
info_ptr->world_save_p = 0;
break;
@@ -23837,7 +23836,7 @@ is_load_insn (rtx insn, rtx *load_mem)
if (!insn || !INSN_P (insn))
return false;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return false;
return is_load_insn1 (PATTERN (insn), load_mem);
@@ -24232,7 +24231,7 @@ insn_must_be_first_in_group (rtx insn)
enum attr_type type;
if (!insn
- || GET_CODE (insn) == NOTE
+ || NOTE_P (insn)
|| DEBUG_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER)
@@ -24363,7 +24362,7 @@ insn_must_be_last_in_group (rtx insn)
enum attr_type type;
if (!insn
- || GET_CODE (insn) == NOTE
+ || NOTE_P (insn)
|| DEBUG_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER)
diff --git a/gcc/config/s390/s390.c b/gcc/config/s390/s390.c
index 7e87dcd..f6aa581 100644
--- a/gcc/config/s390/s390.c
+++ b/gcc/config/s390/s390.c
@@ -5738,7 +5738,7 @@ addr_generation_dependency_p (rtx dep_rtx, rtx insn)
{
rtx target, pat;
- if (GET_CODE (dep_rtx) == INSN)
+ if (NONJUMP_INSN_P (dep_rtx))
dep_rtx = PATTERN (dep_rtx);
if (GET_CODE (dep_rtx) == SET)
@@ -5978,7 +5978,7 @@ s390_split_branches (void)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) != JUMP_INSN)
+ if (! JUMP_P (insn))
continue;
pat = PATTERN (insn);
@@ -6398,7 +6398,7 @@ s390_find_constant (struct constant_pool *pool, rtx val,
static rtx
s390_execute_label (rtx insn)
{
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == PARALLEL
&& GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
&& XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
@@ -6603,7 +6603,7 @@ s390_mainpool_start (void)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
&& XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
@@ -6616,7 +6616,7 @@ s390_mainpool_start (void)
{
s390_add_execute (pool, insn);
}
- else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6758,7 +6758,7 @@ s390_mainpool_finish (struct constant_pool *pool)
if (INSN_P (insn))
replace_ltrel_base (&PATTERN (insn));
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx addr, pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6840,7 +6840,7 @@ s390_chunkify_start (void)
s390_add_execute (curr_pool, insn);
s390_add_pool_insn (curr_pool, insn);
}
- else if (GET_CODE (insn) == INSN || CALL_P (insn))
+ else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6867,7 +6867,7 @@ s390_chunkify_start (void)
}
}
- if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
+ if (JUMP_P (insn) || LABEL_P (insn))
{
if (curr_pool)
s390_add_pool_insn (curr_pool, insn);
@@ -6911,7 +6911,7 @@ s390_chunkify_start (void)
Those will have an effect on code size, which we need to
consider here. This calculation makes rather pessimistic
worst-case assumptions. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
extra_size += 6;
if (chunk_size < S390_POOL_CHUNK_MIN
@@ -6920,7 +6920,7 @@ s390_chunkify_start (void)
continue;
/* Pool chunks can only be inserted after BARRIERs ... */
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
s390_end_pool (curr_pool, insn);
curr_pool = NULL;
@@ -6937,7 +6937,7 @@ s390_chunkify_start (void)
if (!section_switch_p)
{
/* We can insert the barrier only after a 'real' insn. */
- if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
+ if (! NONJUMP_INSN_P (insn) && ! CALL_P (insn))
continue;
if (get_attr_length (insn) == 0)
continue;
@@ -7009,11 +7009,11 @@ s390_chunkify_start (void)
Don't do that, however, if it is the label before
a jump table. */
- if (GET_CODE (insn) == CODE_LABEL
+ if (LABEL_P (insn)
&& (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
{
rtx vec_insn = next_real_insn (insn);
- rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
+ rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
PATTERN (vec_insn) : NULL_RTX;
if (!vec_pat
|| !(GET_CODE (vec_pat) == ADDR_VEC
@@ -7023,7 +7023,7 @@ s390_chunkify_start (void)
/* If we have a direct jump (conditional or unconditional)
or a casesi jump, check all potential targets. */
- else if (GET_CODE (insn) == JUMP_INSN)
+ else if (JUMP_P (insn))
{
rtx pat = PATTERN (insn);
if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
@@ -7048,7 +7048,7 @@ s390_chunkify_start (void)
/* Find the jump table used by this casesi jump. */
rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
rtx vec_insn = next_real_insn (vec_label);
- rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
+ rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
PATTERN (vec_insn) : NULL_RTX;
if (vec_pat
&& (GET_CODE (vec_pat) == ADDR_VEC
@@ -7082,7 +7082,7 @@ s390_chunkify_start (void)
/* Insert base register reload insns at every far label. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CODE_LABEL
+ if (LABEL_P (insn)
&& bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
{
struct constant_pool *pool = s390_find_pool (pool_list, insn);
@@ -7128,7 +7128,7 @@ s390_chunkify_finish (struct constant_pool *pool_list)
if (!curr_pool)
continue;
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx addr, pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -7181,9 +7181,9 @@ s390_chunkify_cancel (struct constant_pool *pool_list)
rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
rtx label = NEXT_INSN (curr_pool->pool_insn);
- if (jump && GET_CODE (jump) == JUMP_INSN
- && barrier && GET_CODE (barrier) == BARRIER
- && label && GET_CODE (label) == CODE_LABEL
+ if (jump && JUMP_P (jump)
+ && barrier && BARRIER_P (barrier)
+ && label && LABEL_P (label)
&& GET_CODE (PATTERN (jump)) == SET
&& SET_DEST (PATTERN (jump)) == pc_rtx
&& GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
@@ -7203,7 +7203,7 @@ s390_chunkify_cancel (struct constant_pool *pool_list)
{
rtx next_insn = NEXT_INSN (insn);
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
&& XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
@@ -10080,7 +10080,7 @@ s390_optimize_prologue (void)
next_insn = NEXT_INSN (insn);
- if (GET_CODE (insn) != INSN)
+ if (! NONJUMP_INSN_P (insn))
continue;
if (GET_CODE (PATTERN (insn)) == PARALLEL
diff --git a/gcc/config/sparc/sparc.c b/gcc/config/sparc/sparc.c
index f4ac6e4..3e98325 100644
--- a/gcc/config/sparc/sparc.c
+++ b/gcc/config/sparc/sparc.c
@@ -3063,10 +3063,10 @@ emit_cbcond_nop (rtx insn)
if (!next)
return 1;
- if (GET_CODE (next) == INSN
+ if (NONJUMP_INSN_P (next)
&& GET_CODE (PATTERN (next)) == SEQUENCE)
next = XVECEXP (PATTERN (next), 0, 0);
- else if (GET_CODE (next) == CALL_INSN
+ else if (CALL_P (next)
&& GET_CODE (PATTERN (next)) == PARALLEL)
{
rtx delay = XVECEXP (PATTERN (next), 0, 1);
@@ -3222,7 +3222,7 @@ eligible_for_return_delay (rtx trial)
int regno;
rtx pat;
- if (GET_CODE (trial) != INSN)
+ if (! NONJUMP_INSN_P (trial))
return 0;
if (get_attr_length (trial) != 1)
@@ -3293,7 +3293,7 @@ eligible_for_sibcall_delay (rtx trial)
{
rtx pat;
- if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET)
+ if (! NONJUMP_INSN_P (trial) || GET_CODE (PATTERN (trial)) != SET)
return 0;
if (get_attr_length (trial) != 1)
@@ -5424,7 +5424,7 @@ sparc_asm_function_epilogue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
last_real_insn = prev_real_insn (insn);
if (last_real_insn
- && GET_CODE (last_real_insn) == INSN
+ && NONJUMP_INSN_P (last_real_insn)
&& GET_CODE (PATTERN (last_real_insn)) == SEQUENCE)
last_real_insn = XVECEXP (PATTERN (last_real_insn), 0, 0);
diff --git a/gcc/config/spu/spu.c b/gcc/config/spu/spu.c
index 328bd5b..2d8ec9c 100644
--- a/gcc/config/spu/spu.c
+++ b/gcc/config/spu/spu.c
@@ -1962,7 +1962,7 @@ struct spu_bb_info
static struct spu_bb_info *spu_bb_info;
#define STOP_HINT_P(INSN) \
- (GET_CODE(INSN) == CALL_INSN \
+ (CALL_P(INSN) \
|| INSN_CODE(INSN) == CODE_FOR_divmodsi4 \
|| INSN_CODE(INSN) == CODE_FOR_udivmodsi4)
@@ -2163,7 +2163,7 @@ spu_emit_branch_hint (rtx before, rtx branch, rtx target,
static rtx
get_branch_target (rtx branch)
{
- if (GET_CODE (branch) == JUMP_INSN)
+ if (JUMP_P (branch))
{
rtx set, src;
@@ -2212,7 +2212,7 @@ get_branch_target (rtx branch)
return src;
}
- else if (GET_CODE (branch) == CALL_INSN)
+ else if (CALL_P (branch))
{
rtx call;
/* All of our call patterns are in a PARALLEL and the CALL is
diff --git a/gcc/config/stormy16/stormy16.c b/gcc/config/stormy16/stormy16.c
index 278ac17..30d6d78 100644
--- a/gcc/config/stormy16/stormy16.c
+++ b/gcc/config/stormy16/stormy16.c
@@ -2441,8 +2441,7 @@ combine_bnp (rtx insn)
if (reg_mentioned_p (reg, and_insn))
return;
- if (GET_CODE (and_insn) != NOTE
- && GET_CODE (and_insn) != INSN)
+ if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
return;
}
}
@@ -2461,8 +2460,7 @@ combine_bnp (rtx insn)
if (reg_mentioned_p (reg, and_insn))
return;
- if (GET_CODE (and_insn) != NOTE
- && GET_CODE (and_insn) != INSN)
+ if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
return;
}
@@ -2486,8 +2484,7 @@ combine_bnp (rtx insn)
break;
if (reg_mentioned_p (reg, shift)
- || (GET_CODE (shift) != NOTE
- && GET_CODE (shift) != INSN))
+ || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
{
shift = NULL_RTX;
break;
@@ -2534,8 +2531,7 @@ combine_bnp (rtx insn)
if (reg_mentioned_p (reg, load))
return;
- if (GET_CODE (load) != NOTE
- && GET_CODE (load) != INSN)
+ if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
return;
}
if (!load)
diff --git a/gcc/config/v850/v850.c b/gcc/config/v850/v850.c
index 67cc2c2..4a746aa 100644
--- a/gcc/config/v850/v850.c
+++ b/gcc/config/v850/v850.c
@@ -1133,13 +1133,13 @@ Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, end
IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
INSN_UID (first_insn), INSN_UID (last_insn));
- if (GET_CODE (first_insn) == NOTE)
+ if (NOTE_P (first_insn))
first_insn = next_nonnote_insn (first_insn);
last_insn = next_nonnote_insn (last_insn);
for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
rtx pattern = single_set (insn);
@@ -1199,7 +1199,7 @@ Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, end
/* Optimize back to back cases of ep <- r1 & r1 <- ep. */
insn = prev_nonnote_insn (first_insn);
- if (insn && GET_CODE (insn) == INSN
+ if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& SET_DEST (PATTERN (insn)) == *p_ep
&& SET_SRC (PATTERN (insn)) == *p_r1)
diff --git a/gcc/config/xtensa/xtensa.c b/gcc/config/xtensa/xtensa.c
index 45929ad..7faf7de 100644
--- a/gcc/config/xtensa/xtensa.c
+++ b/gcc/config/xtensa/xtensa.c
@@ -1650,7 +1650,7 @@ xtensa_emit_loop_end (rtx insn, rtx *operands)
{
rtx body = PATTERN (insn);
- if (GET_CODE (body) == JUMP_INSN)
+ if (JUMP_P (body))
{
output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
done = 1;