author     David Malcolm <dmalcolm@redhat.com>      2014-09-09 16:34:56 +0000
committer  David Malcolm <dmalcolm@gcc.gnu.org>     2014-09-09 16:34:56 +0000
commit     647d790d2f08bf28e5972644efd563057ff7634d (patch)
tree       0f16f0403adbc835cd52868e02e4cb745c3acfc9 /gcc/ifcvt.c
parent     b677236af0dc857c3f03ed724e77b4961e4c0b9c (diff)
download   gcc-647d790d2f08bf28e5972644efd563057ff7634d.zip
           gcc-647d790d2f08bf28e5972644efd563057ff7634d.tar.gz
           gcc-647d790d2f08bf28e5972644efd563057ff7634d.tar.bz2
recog_memoized works on an rtx_insn *
gcc/ChangeLog:
2014-09-09  David Malcolm  <dmalcolm@redhat.com>

	* caller-save.c (rtx saveinsn): Strengthen this variable from rtx to rtx_insn *.
	(restinsn): Likewise.
	* config/aarch64/aarch64-protos.h (aarch64_simd_attr_length_move): Likewise for param.
	* config/aarch64/aarch64.c (aarch64_simd_attr_length_move): Likewise.
	* config/arc/arc-protos.h (arc_adjust_insn_length): Likewise for first param.
	(arc_hazard): Likewise for both params.
	* config/arc/arc.c (arc600_corereg_hazard): Likewise, adding checked casts to rtx_sequence * and uses of the insn method for type-safety.
	(arc_hazard): Strengthen both params from rtx to rtx_insn *.
	(arc_adjust_insn_length): Likewise for param "insn".
	(struct insn_length_parameters_s): Likewise for first param of "get_variants" callback field.
	(arc_get_insn_variants): Likewise for first param and local "inner". Replace a check of GET_CODE with a dyn_cast to rtx_sequence *, using methods for type-safety and clarity.
	* config/arc/arc.h (ADJUST_INSN_LENGTH): Use casts to rtx_sequence * and uses of the insn method for type-safety when invoking arc_adjust_insn_length.
	* config/arm/arm-protos.h (arm_attr_length_move_neon): Likewise for param.
	(arm_address_offset_is_imm): Likewise.
	(struct tune_params): Likewise for params 1 and 3 of the "sched_adjust_cost" callback field.
	* config/arm/arm.c (cortex_a9_sched_adjust_cost): Likewise for params 1 and 3 ("insn" and "dep").
	(xscale_sched_adjust_cost): Likewise.
	(fa726te_sched_adjust_cost): Likewise.
	(cortexa7_older_only): Likewise for param "insn".
	(cortexa7_younger): Likewise.
	(arm_attr_length_move_neon): Likewise.
	(arm_address_offset_is_imm): Likewise.
	* config/avr/avr-protos.h (avr_notice_update_cc): Likewise.
	* config/avr/avr.c (avr_notice_update_cc): Likewise.
	* config/bfin/bfin.c (hwloop_pattern_reg): Likewise.
	(workaround_speculation): Likewise for local "last_condjump".
	* config/c6x/c6x.c (shadow_p): Likewise for param "insn".
	(shadow_or_blockage_p): Likewise.
	(get_unit_reqs): Likewise.
	(get_unit_operand_masks): Likewise.
	(c6x_registers_update): Likewise.
	(returning_call_p): Likewise.
	(can_use_callp): Likewise.
	(convert_to_callp): Likewise.
	(find_last_same_clock): Likewise for local "t".
	(reorg_split_calls): Likewise for local "shadow".
	(hwloop_pattern_reg): Likewise for param "insn".
	* config/frv/frv-protos.h (frv_final_prescan_insn): Likewise.
	* config/frv/frv.c (frv_final_prescan_insn): Likewise.
	(frv_extract_membar): Likewise.
	(frv_optimize_membar_local): Strengthen param "last_membar" from rtx * to rtx_insn **.
	(frv_optimize_membar_global): Strengthen param "membar" from rtx to rtx_insn *.
	(frv_optimize_membar): Strengthen local "last_membar" from rtx * to rtx_insn **.
	* config/ia64/ia64-protos.h (ia64_st_address_bypass_p): Strengthen both params from rtx to rtx_insn *.
	(ia64_ld_address_bypass_p): Likewise.
	* config/ia64/ia64.c (ia64_safe_itanium_class): Likewise for param "insn".
	(ia64_safe_type): Likewise.
	(group_barrier_needed): Likewise.
	(safe_group_barrier_needed): Likewise.
	(ia64_single_set): Likewise.
	(is_load_p): Likewise.
	(record_memory_reference): Likewise.
	(get_mode_no_for_insn): Likewise.
	(important_for_bundling_p): Likewise.
	(unknown_for_bundling_p): Likewise.
	(ia64_st_address_bypass_p): Likewise for both params.
	(ia64_ld_address_bypass_p): Likewise.
	(expand_vselect): Introduce new local rtx_insn * "insn", using it in place of rtx "x" after the emit_insn call.
	* config/i386/i386-protos.h (x86_extended_QIreg_mentioned_p): Strengthen param from rtx to rtx_insn *.
	(ix86_agi_dependent): Likewise for both params.
	(ix86_attr_length_immediate_default): Likewise for param 1.
	(ix86_attr_length_address_default): Likewise for param.
	(ix86_attr_length_vex_default): Likewise for param 1.
	* config/i386/i386.c (ix86_attr_length_immediate_default): Likewise for param "insn".
	(ix86_attr_length_address_default): Likewise.
	(ix86_attr_length_vex_default): Likewise.
	(ix86_agi_dependent): Likewise for both params.
	(x86_extended_QIreg_mentioned_p): Likewise for param "insn".
	(vselect_insn): Likewise for this variable.
	* config/m68k/m68k-protos.h (m68k_sched_attr_opx_type): Likewise for param 1.
	(m68k_sched_attr_opy_type): Likewise.
	* config/m68k/m68k.c (sched_get_operand): Likewise.
	(sched_attr_op_type): Likewise.
	(m68k_sched_attr_opx_type): Likewise.
	(m68k_sched_attr_opy_type): Likewise.
	(sched_get_reg_operand): Likewise.
	(sched_get_mem_operand): Likewise.
	(m68k_sched_address_bypass_p): Likewise for both params.
	(sched_get_indexed_address_scale): Likewise.
	(m68k_sched_indexed_address_bypass_p): Likewise.
	* config/m68k/m68k.h (m68k_sched_address_bypass_p): Likewise.
	(m68k_sched_indexed_address_bypass_p): Likewise.
	* config/mep/mep.c (mep_jmp_return_reorg): Strengthen locals "label", "ret" from rtx to rtx_insn *, adding a checked cast and removing another.
	* config/mips/mips-protos.h (mips_linked_madd_p): Strengthen both params from rtx to rtx_insn *.
	(mips_fmadd_bypass): Likewise.
	* config/mips/mips.c (mips_fmadd_bypass): Likewise.
	(mips_linked_madd_p): Likewise.
	(mips_macc_chains_last_hilo): Likewise for this variable.
	(mips_macc_chains_record): Likewise for param.
	(vr4130_last_insn): Likewise for this variable.
	(vr4130_swap_insns_p): Likewise for both params.
	(mips_ls2_variable_issue): Likewise for param.
	(mips_need_noat_wrapper_p): Likewise for param "insn".
	(mips_expand_vselect): Add a new local rtx_insn * "insn", using it in place of "x" after the emit_insn.
	* config/pa/pa-protos.h (pa_fpstore_bypass_p): Strengthen both params from rtx to rtx_insn *.
	* config/pa/pa.c (pa_fpstore_bypass_p): Likewise.
	(pa_combine_instructions): Introduce local "par" for result of gen_rtx_PARALLEL, moving decl and usage of new_rtx for after call to make_insn_raw.
	(pa_can_combine_p): Strengthen param "new_rtx" from rtx to rtx_insn *.
	* config/rl78/rl78.c (insn_ok_now): Likewise for param "insn".
	(rl78_alloc_physical_registers_op1): Likewise.
	(rl78_alloc_physical_registers_op2): Likewise.
	(rl78_alloc_physical_registers_ro1): Likewise.
	(rl78_alloc_physical_registers_cmp): Likewise.
	(rl78_alloc_physical_registers_umul): Likewise.
	(rl78_alloc_address_registers_macax): Likewise.
	(rl78_alloc_physical_registers): Likewise for locals "insn", "curr".
	* config/s390/predicates.md (execute_operation): Likewise for local "insn".
	* config/s390/s390-protos.h (s390_agen_dep_p): Likewise for both params.
	* config/s390/s390.c (s390_safe_attr_type): Likewise for param.
	(addr_generation_dependency_p): Likewise for param "insn".
	(s390_agen_dep_p): Likewise for both params.
	(s390_fpload_toreg): Likewise for param "insn".
	* config/sh/sh-protos.h (sh_loop_align): Likewise for param.
	* config/sh/sh.c (sh_loop_align): Likewise for param and local "next".
	* config/sh/sh.md (define_peephole2): Likewise for local "insn2".
	* config/sh/sh_treg_combine.cc (sh_treg_combine::make_inv_ccreg_insn): Likewise for return type and local "i".
	(sh_treg_combine::try_eliminate_cstores): Likewise for local "i".
	* config/stormy16/stormy16.c (combine_bnp): Likewise for locals "and_insn", "load", "shift".
	* config/tilegx/tilegx.c (match_pcrel_step2): Likewise for param "insn".
	* final.c (final_scan_insn): Introduce local rtx_insn * "other" for XEXP (note, 0) of the REG_CC_SETTER note.
	(cleanup_subreg_operands): Strengthen param "insn" from rtx to rtx_insn *, eliminating a checked cast made redundant by this.
	* gcse.c (process_insert_insn): Strengthen local "insn" from rtx to rtx_insn *.
	* genattr.c (main): When writing out the prototype to const_num_delay_slots, strengthen the param from rtx to rtx_insn *.
	* genattrtab.c (write_const_num_delay_slots): Likewise when writing out the implementation of const_num_delay_slots.
	* hw-doloop.h (struct hw_doloop_hooks): Strengthen the param "insn" of callback field "end_pattern_reg" from rtx to rtx_insn *.
	* ifcvt.c (noce_emit_store_flag): Eliminate local rtx "tmp" in favor of new rtx locals "src" and "set" and new local rtx_insn * "insn" and "seq".
	(noce_emit_move_insn): Strengthen locals "seq" and "insn" from rtx to rtx_insn *.
	(noce_emit_cmove): Eliminate local rtx "tmp" in favor of new rtx locals "cond", "if_then_else", "set" and new rtx_insn * locals "insn" and "seq".
	(noce_try_cmove_arith): Strengthen locals "insn_a" and "insn_b", "last" from rtx to rtx_insn *. Likewise for a local "tmp", renaming to "tmp_insn". Eliminate the other local rtx "tmp" from the top-level scope, replacing with new more tightly-scoped rtx locals "reg", "pat", "mem" and rtx_insn * "insn", "copy_of_a", "new_insn", "copy_of_insn_b", and make local rtx "set" more tightly-scoped.
	* ira-int.h (ira_setup_alts): Strengthen param "insn" from rtx to rtx_insn *.
	* ira.c (setup_prohibited_mode_move_regs): Likewise for local "move_insn".
	(ira_setup_alts): Likewise for param "insn".
	* lra-constraints.c (emit_inc): Likewise for local "add_insn".
	* lra.c (emit_add3_insn): Split local rtx "insn" in two, an rtx and an rtx_insn *.
	(lra_emit_add): Eliminate top-level local rtx "insn" in favor of new more-tightly scoped rtx locals "add3_insn", "insn", "add2_insn" and rtx_insn * "move_insn".
	* postreload-gcse.c (eliminate_partially_redundant_load): Add checked cast on result of gen_move_insn when invoking extract_insn.
	* recog.c (insn_invalid_p): Strengthen param "insn" from rtx to rtx_insn *.
	(verify_changes): Add a checked cast on "object" when invoking insn_invalid_p.
	(extract_insn_cached): Strengthen param "insn" from rtx to rtx_insn *.
	(extract_constrain_insn_cached): Likewise.
	(extract_insn): Likewise.
	* recog.h (insn_invalid_p): Likewise for param 1.
	(recog_memoized): Likewise for param.
	(extract_insn): Likewise.
	(extract_constrain_insn_cached): Likewise.
	(extract_insn_cached): Likewise.
	* reload.c (can_reload_into): Likewise for local "test_insn".
	* reload.h (cleanup_subreg_operands): Likewise for param.
	* reload1.c (emit_insn_if_valid_for_reload): Rename param from "insn" to "pat", reintroducing "insn" as an rtx_insn * on the result of emit_insn. Remove a checked cast made redundant by this change.
	* sel-sched-ir.c (sel_insn_rtx_cost): Strengthen param "insn" from rtx to rtx_insn *.
	* sel-sched.c (get_reg_class): Likewise.

From-SVN: r215087
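The change that gives the commit its title is the recog.h entry: recog_memoized now takes an rtx_insn * instead of a plain rtx, so a caller must either already hold an instruction or prove it with a checked cast. A rough sketch of the before/after prototype (the body is reconstructed from memory of recog.h rather than copied from this commit, so treat it as illustrative):

    /* Before: any rtx was accepted, even one that is not an insn.  */
    static inline int recog_memoized (rtx insn);

    /* After: callers that only have an rtx no longer compile.  */
    static inline int
    recog_memoized (rtx_insn *insn)
    {
      if (INSN_CODE (insn) < 0)
        INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
      return INSN_CODE (insn);
    }

Callers whose own types cannot be strengthened outright use a checked cast such as as_a <rtx_insn *> (x), the same checked-cast idiom several of the entries above describe.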
Diffstat (limited to 'gcc/ifcvt.c')
-rw-r--r--   gcc/ifcvt.c   98
1 file changed, 51 insertions(+), 47 deletions(-)
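Every ifcvt.c hunk below applies the same recipe: a single catch-all rtx "tmp" that successively held an expression, a SET, and finally the emitted instruction is split into separate, properly typed locals, so the value reaching recog_memoized is an rtx_insn * by construction. A simplified sketch of the shape of the change, using names from the first hunk with the surrounding sequence handling elided (not a verbatim excerpt):

    /* Before: one rtx variable plays every role.  */
    rtx tmp;
    tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0), XEXP (cond, 1));
    tmp = gen_rtx_SET (VOIDmode, x, tmp);
    tmp = emit_insn (tmp);            /* from here on tmp is really an insn */
    if (recog_memoized (tmp) >= 0)    /* only type-checks while recog_memoized takes rtx */
      ...

    /* After: each role gets its own variable with the right static type.  */
    rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0), XEXP (cond, 1));
    rtx set = gen_rtx_SET (VOIDmode, x, src);
    rtx_insn *insn = emit_insn (set);
    if (recog_memoized (insn) >= 0)
      ...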
diff --git a/gcc/ifcvt.c b/gcc/ifcvt.c
index ed30a59..18e8df6 100644
--- a/gcc/ifcvt.c
+++ b/gcc/ifcvt.c
@@ -861,20 +861,18 @@ noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
if ((if_info->cond_earliest == if_info->jump || cond_complex)
&& (normalize == 0 || STORE_FLAG_VALUE == normalize))
{
- rtx tmp;
-
- tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
+ rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
XEXP (cond, 1));
- tmp = gen_rtx_SET (VOIDmode, x, tmp);
+ rtx set = gen_rtx_SET (VOIDmode, x, src);
start_sequence ();
- tmp = emit_insn (tmp);
+ rtx_insn *insn = emit_insn (set);
- if (recog_memoized (tmp) >= 0)
+ if (recog_memoized (insn) >= 0)
{
- tmp = get_insns ();
+ rtx_insn *seq = get_insns ();
end_sequence ();
- emit_insn (tmp);
+ emit_insn (seq);
if_info->cond_earliest = if_info->jump;
@@ -906,7 +904,8 @@ noce_emit_move_insn (rtx x, rtx y)
if (GET_CODE (x) != STRICT_LOW_PART)
{
- rtx seq, insn, target;
+ rtx_insn *seq, *insn;
+ rtx target;
optab ot;
start_sequence ();
@@ -1417,20 +1416,19 @@ noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
if (if_info->cond_earliest == if_info->jump)
{
- rtx tmp;
-
- tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
- tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
- tmp = gen_rtx_SET (VOIDmode, x, tmp);
+ rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
+ rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
+ cond, vtrue, vfalse);
+ rtx set = gen_rtx_SET (VOIDmode, x, if_then_else);
start_sequence ();
- tmp = emit_insn (tmp);
+ rtx_insn *insn = emit_insn (set);
- if (recog_memoized (tmp) >= 0)
+ if (recog_memoized (insn) >= 0)
{
- tmp = get_insns ();
+ rtx_insn *seq = get_insns ();
end_sequence ();
- emit_insn (tmp);
+ emit_insn (seq);
return x;
}
@@ -1563,11 +1561,12 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
rtx b = if_info->b;
rtx x = if_info->x;
rtx orig_a, orig_b;
- rtx insn_a, insn_b;
- rtx tmp, target;
+ rtx_insn *insn_a, *insn_b;
+ rtx target;
int is_mem = 0;
int insn_cost;
enum rtx_code code;
+ rtx_insn *ifcvt_seq;
/* A conditional move from two memory sources is equivalent to a
conditional on their addresses followed by a load. Don't do this
@@ -1637,9 +1636,11 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
if (reversep)
{
+ rtx tmp;
+ rtx_insn *tmp_insn;
code = reversed_comparison_code (if_info->cond, if_info->jump);
tmp = a, a = b, b = tmp;
- tmp = insn_a, insn_a = insn_b, insn_b = tmp;
+ tmp_insn = insn_a, insn_a = insn_b, insn_b = tmp_insn;
}
}
@@ -1654,44 +1655,46 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
This is of course not possible in the IS_MEM case. */
if (! general_operand (a, GET_MODE (a)))
{
- rtx set;
+ rtx_insn *insn;
if (is_mem)
{
- tmp = gen_reg_rtx (GET_MODE (a));
- tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
+ rtx reg = gen_reg_rtx (GET_MODE (a));
+ insn = emit_insn (gen_rtx_SET (VOIDmode, reg, a));
}
else if (! insn_a)
goto end_seq_and_fail;
else
{
a = gen_reg_rtx (GET_MODE (a));
- tmp = copy_rtx (insn_a);
- set = single_set (tmp);
+ rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
+ rtx set = single_set (copy_of_a);
SET_DEST (set) = a;
- tmp = emit_insn (PATTERN (tmp));
+ insn = emit_insn (PATTERN (copy_of_a));
}
- if (recog_memoized (tmp) < 0)
+ if (recog_memoized (insn) < 0)
goto end_seq_and_fail;
}
if (! general_operand (b, GET_MODE (b)))
{
- rtx set, last;
+ rtx pat;
+ rtx_insn *last;
+ rtx_insn *new_insn;
if (is_mem)
{
- tmp = gen_reg_rtx (GET_MODE (b));
- tmp = gen_rtx_SET (VOIDmode, tmp, b);
+ rtx reg = gen_reg_rtx (GET_MODE (b));
+ pat = gen_rtx_SET (VOIDmode, reg, b);
}
else if (! insn_b)
goto end_seq_and_fail;
else
{
b = gen_reg_rtx (GET_MODE (b));
- tmp = copy_rtx (insn_b);
- set = single_set (tmp);
+ rtx_insn *copy_of_insn_b = as_a <rtx_insn *> (copy_rtx (insn_b));
+ rtx set = single_set (copy_of_insn_b);
SET_DEST (set) = b;
- tmp = PATTERN (tmp);
+ pat = PATTERN (copy_of_insn_b);
}
/* If insn to set up A clobbers any registers B depends on, try to
@@ -1700,14 +1703,14 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
last = get_last_insn ();
if (last && modified_in_p (orig_b, last))
{
- tmp = emit_insn_before (tmp, get_insns ());
- if (modified_in_p (orig_a, tmp))
+ new_insn = emit_insn_before (pat, get_insns ());
+ if (modified_in_p (orig_a, new_insn))
goto end_seq_and_fail;
}
else
- tmp = emit_insn (tmp);
+ new_insn = emit_insn (pat);
- if (recog_memoized (tmp) < 0)
+ if (recog_memoized (new_insn) < 0)
goto end_seq_and_fail;
}
@@ -1720,29 +1723,30 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
/* If we're handling a memory for above, emit the load now. */
if (is_mem)
{
- tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
+ rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);
/* Copy over flags as appropriate. */
if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
- MEM_VOLATILE_P (tmp) = 1;
+ MEM_VOLATILE_P (mem) = 1;
if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
- set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
- set_mem_align (tmp,
+ set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
+ set_mem_align (mem,
MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
- set_mem_addr_space (tmp, MEM_ADDR_SPACE (if_info->a));
+ set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));
- noce_emit_move_insn (if_info->x, tmp);
+ noce_emit_move_insn (if_info->x, mem);
}
else if (target != x)
noce_emit_move_insn (x, target);
- tmp = end_ifcvt_sequence (if_info);
- if (!tmp)
+ ifcvt_seq = end_ifcvt_sequence (if_info);
+ if (!ifcvt_seq)
return FALSE;
- emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATION (if_info->insn_a));
+ emit_insn_before_setloc (ifcvt_seq, if_info->jump,
+ INSN_LOCATION (if_info->insn_a));
return TRUE;
end_seq_and_fail: