| author | Richard Sandiford <richard.sandiford@arm.com> | 2019-09-30 16:21:00 +0000 |
|---|---|---|
| committer | Richard Sandiford <rsandifo@gcc.gnu.org> | 2019-09-30 16:21:00 +0000 |
| commit | a1e6ee38e708ef2bdef4dfbb99473344bd56fa2f (patch) | |
| tree | f7505539096aba2d8bb5a8204b91d5d1e299c13c /gcc/lra-remat.c | |
| parent | 5c64181d62bb816b8c2c91e251cee7e2b597f244 (diff) | |
Remove global call sets: LRA
lra_reg has an actual_call_used_reg_set field that is only used during
inheritance. This in turn required a special lra_create_live_ranges
pass for flag_ipa_ra to set up this field. This patch instead makes
the inheritance code do its own live register tracking, using the
same ABI-mask-and-clobber-set pair as for IRA.
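In concrete terms, the tracking pairs a per-ABI record of the last call seen with a running set of registers that those calls clobber. The snippet below is a minimal sketch of that pattern, not the patch itself: last_call_for_abi, full_and_partial_call_clobbers, insn_callee_abi, full_and_partial_reg_clobbers and call_clobbered_in_region_p are the interfaces named in the ChangeLog below, while the helper functions, argument lists and surrounding bookkeeping are simplified assumptions.

```cpp
/* Sketch only: ABI-mask-and-clobber-set tracking during inheritance.
   NUM_ABI_IDS and function_abi::id () are assumed from function-abi.h.  */

/* Last call insn seen for each ABI in the current EBB (NULL if none).  */
static rtx_insn *last_call_for_abi[NUM_ABI_IDS];

/* Registers fully or partially clobbered by the calls seen so far.  */
static HARD_REG_SET full_and_partial_call_clobbers;

/* Record a call while walking the EBB.  */
static void
note_call (rtx_insn *call_insn)
{
  function_abi callee_abi = insn_callee_abi (call_insn);
  last_call_for_abi[callee_abi.id ()] = call_insn;
  full_and_partial_call_clobbers
    |= callee_abi.full_and_partial_reg_clobbers ();
}

/* ABIS is a mask of the ABI ids of calls that occur between a pseudo's
   last use and the current insn.  Return true if those calls clobber
   hard register (MODE, REGNO), so the pseudo would need a caller save.  */
static bool
clobbered_by_calls_p (unsigned int abis, machine_mode mode,
                      unsigned int regno)
{
  return call_clobbered_in_region_p (abis, full_and_partial_call_clobbers,
                                     mode, regno);
}
```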
Tracking ABIs simplifies (and cheapens) the logic in lra-lives.c and
means we no longer need a separate path for -fipa-ra. It also means
we can remove TARGET_RETURN_CALL_WITH_MAX_CLOBBERS.
The patch also strengthens the sanity check in lra_assign so that
we check that reg_renumber is consistent with the whole conflict set,
not just the call-clobbered registers.
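The idea behind the strengthened check, as a sketch rather than the exact hunk (lra_reg_info, conflict_hard_regs, reg_renumber, PSEUDO_REGNO_MODE and overlaps_hard_reg_set_p are existing GCC interfaces; the loop placement inside lra_assign is assumed):

```cpp
/* Sketch: after assignment, an allocated pseudo's hard register must not
   overlap anything in the pseudo's conflict set -- not merely the
   call-clobbered subset of it.  */
for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
  if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0)
    gcc_assert (!overlaps_hard_reg_set_p (lra_reg_info[i].conflict_hard_regs,
                                          PSEUDO_REGNO_MODE (i),
                                          reg_renumber[i]));
```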
2019-09-30 Richard Sandiford <richard.sandiford@arm.com>
gcc/
* target.def (return_call_with_max_clobbers): Delete.
* doc/tm.texi.in (TARGET_RETURN_CALL_WITH_MAX_CLOBBERS): Delete.
* doc/tm.texi: Regenerate.
* config/aarch64/aarch64.c (aarch64_return_call_with_max_clobbers)
(TARGET_RETURN_CALL_WITH_MAX_CLOBBERS): Delete.
* lra-int.h (lra_reg::actual_call_used_reg_set): Delete.
(lra_reg::call_insn): Delete.
* lra.c: Include function-abi.h.
(initialize_lra_reg_info_element): Don't initialize the fields above.
(lra): Use crtl->abi to test whether the current function needs to
save a register in the prologue. Remove special pre-inheritance
lra_create_live_ranges pass for flag_ipa_ra.
* lra-assigns.c: Include function-abi.h.
(find_hard_regno_for_1): Use crtl->abi to test whether the current
function needs to save a register in the prologue.
(lra_assign): Assert that registers aren't allocated to a
conflicting register, rather than checking only for overlaps
with call_used_or_fixed_regs. Do this even for flag_ipa_ra,
and for registers that are not live across a call.
* lra-constraints.c (last_call_for_abi): New variable.
(full_and_partial_call_clobbers): Likewise.
(setup_next_usage_insn): Remove the register from
full_and_partial_call_clobbers.
(need_for_call_save_p): Use call_clobbered_in_region_p to test
whether the register needs a caller save.
(need_for_split_p): Use full_and_partial_reg_clobbers instead
of call_used_or_fixed_regs.
(inherit_in_ebb): Initialize and maintain last_call_for_abi and
full_and_partial_call_clobbers.
* lra-lives.c (check_pseudos_live_through_calls): Replace
last_call_used_reg_set and call_insn arguments with an abi argument.
Remove handling of lra_reg::call_insn. Use function_abi::mode_clobbers
as the set of conflicting registers.
(calls_have_same_clobbers_p): Delete.
(process_bb_lives): Track the ABI of the last call instead of an
insn/HARD_REG_SET pair. Update calls to
check_pseudos_live_through_calls. Use eh_edge_abi to calculate
the set of registers that could be clobbered by an EH edge.
Include partially-clobbered as well as fully-clobbered registers.
(lra_create_live_ranges_1): Don't initialize lra_reg::call_insn.
* lra-remat.c: Include function-abi.h.
(call_used_regs_arr_len, call_used_regs_arr): Delete.
(set_bb_regs): Use insn_callee_abi to get the set of call-clobbered
registers and bitmap_view to combine them into dead_regs.
(call_used_input_regno_present_p): Take a function_abi argument
and use it to test whether a register is call-clobbered.
(calculate_gen_cands): Use insn_callee_abi to get the ABI of the
call insn target. Update the call to call_used_input_regno_present_p.
(do_remat): Likewise.
(lra_remat): Remove the initialization of call_used_regs_arr_len
and call_used_regs_arr.
From-SVN: r276327
Diffstat (limited to 'gcc/lra-remat.c')
-rw-r--r-- | gcc/lra-remat.c | 67 |
1 files changed, 34 insertions, 33 deletions
```diff
diff --git a/gcc/lra-remat.c b/gcc/lra-remat.c
index ea6e817..ee7ebf6 100644
--- a/gcc/lra-remat.c
+++ b/gcc/lra-remat.c
@@ -65,16 +65,11 @@ along with GCC; see the file COPYING3.  If not see
 #include "recog.h"
 #include "lra.h"
 #include "lra-int.h"
+#include "function-abi.h"
 
 /* Number of candidates for rematerialization.  */
 static unsigned int cands_num;
 
-/* The following is used for representation of call_used_or_fixed_regs in
-   form array whose elements are hard register numbers with nonzero bit
-   in CALL_USED_OR_FIXED_REGS.  */
-static int call_used_regs_arr_len;
-static int call_used_regs_arr[FIRST_PSEUDO_REGISTER];
-
 /* Bitmap used for different calculations.  */
 static bitmap_head temp_bitmap;
 
@@ -633,9 +628,12 @@ set_bb_regs (basic_block bb, rtx_insn *insn)
 	bitmap_set_bit (&subreg_regs, regno);
     }
   if (CALL_P (insn))
-    for (int i = 0; i < call_used_regs_arr_len; i++)
-      bitmap_set_bit (&get_remat_bb_data (bb)->dead_regs,
-		      call_used_regs_arr[i]);
+    {
+      /* Partially-clobbered registers might still be live.  */
+      HARD_REG_SET clobbers = insn_callee_abi (insn).full_reg_clobbers ();
+      bitmap_ior_into (&get_remat_bb_data (bb)->dead_regs,
+		       bitmap_view<HARD_REG_SET> (clobbers));
+    }
 }
 
 /* Calculate changed_regs and dead_regs for each BB.  */
@@ -698,7 +696,7 @@ reg_overlap_for_remat_p (lra_insn_reg *reg, rtx_insn *insn)
 
 /* Return true if a call used register is an input operand of INSN.  */
 static bool
-call_used_input_regno_present_p (rtx_insn *insn)
+call_used_input_regno_present_p (const function_abi &abi, rtx_insn *insn)
 {
   int iter;
   lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
@@ -709,8 +707,9 @@ call_used_input_regno_present_p (rtx_insn *insn)
     for (reg = (iter == 0 ? id->regs : static_id->hard_regs);
 	 reg != NULL;
 	 reg = reg->next)
-      if (reg->type == OP_IN && reg->regno < FIRST_PSEUDO_REGISTER
-	  && TEST_HARD_REG_BIT (call_used_or_fixed_regs, reg->regno))
+      if (reg->type == OP_IN
+	  && reg->regno < FIRST_PSEUDO_REGISTER
+	  && abi.clobbers_reg_p (reg->biggest_mode, reg->regno))
 	return true;
   return false;
 }
@@ -799,18 +798,21 @@ calculate_gen_cands (void)
 	      }
 
 	  if (CALL_P (insn))
-	    EXECUTE_IF_SET_IN_BITMAP (gen_insns, 0, uid, bi)
-	      {
-		rtx_insn *insn2 = lra_insn_recog_data[uid]->insn;
+	    {
+	      function_abi callee_abi = insn_callee_abi (insn);
+	      EXECUTE_IF_SET_IN_BITMAP (gen_insns, 0, uid, bi)
+		{
+		  rtx_insn *insn2 = lra_insn_recog_data[uid]->insn;
 
-		cand = insn_to_cand[INSN_UID (insn2)];
-		gcc_assert (cand != NULL);
-		if (call_used_input_regno_present_p (insn2))
-		  {
-		    bitmap_clear_bit (gen_cands, cand->index);
-		    bitmap_set_bit (&temp_bitmap, uid);
-		  }
-	      }
+		  cand = insn_to_cand[INSN_UID (insn2)];
+		  gcc_assert (cand != NULL);
+		  if (call_used_input_regno_present_p (callee_abi, insn2))
+		    {
+		      bitmap_clear_bit (gen_cands, cand->index);
+		      bitmap_set_bit (&temp_bitmap, uid);
+		    }
+		}
+	    }
 	  bitmap_and_compl_into (gen_insns, &temp_bitmap);
 
 	  cand = insn_to_cand[INSN_UID (insn)];
@@ -1205,13 +1207,16 @@ do_remat (void)
 	    }
 
 	  if (CALL_P (insn))
-	    EXECUTE_IF_SET_IN_BITMAP (avail_cands, 0, cid, bi)
-	      {
-		cand = all_cands[cid];
+	    {
+	      function_abi callee_abi = insn_callee_abi (insn);
+	      EXECUTE_IF_SET_IN_BITMAP (avail_cands, 0, cid, bi)
+		{
+		  cand = all_cands[cid];
 
-		if (call_used_input_regno_present_p (cand->insn))
-		  bitmap_set_bit (&temp_bitmap, cand->index);
-	      }
+		  if (call_used_input_regno_present_p (callee_abi, cand->insn))
+		    bitmap_set_bit (&temp_bitmap, cand->index);
+		}
+	    }
 
 	  bitmap_and_compl_into (avail_cands, &temp_bitmap);
 
@@ -1307,10 +1312,6 @@ lra_remat (void)
   insn_to_cand_activation = XCNEWVEC (cand_t, get_max_uid ());
   regno_cands = XCNEWVEC (cand_t, max_regno);
   all_cands.create (8000);
-  call_used_regs_arr_len = 0;
-  for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++)
-    if (call_used_or_fixed_reg_p (i))
-      call_used_regs_arr[call_used_regs_arr_len++] = i;
   initiate_cand_table ();
   create_remat_bb_data ();
   bitmap_initialize (&temp_bitmap, &reg_obstack);
```
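A note on the two function_abi queries used in the new lra-remat.c code, as an illustrative snippet rather than code from the patch (call_insn, mode and regno are placeholders, not variables in lra-remat.c):

```cpp
function_abi abi = insn_callee_abi (call_insn);

/* Registers the callee clobbers outright.  Only these may be added to
   dead_regs in set_bb_regs: a partially-clobbered register can still
   carry a live value in a sufficiently narrow mode.  */
HARD_REG_SET fully_clobbered = abi.full_reg_clobbers ();

/* Whether a use of (MODE, REGNO) survives the call, counting partial
   clobbers too.  Any clobbered input register makes a rematerialization
   candidate invalid across the call, hence the check in
   call_used_input_regno_present_p.  */
bool input_clobbered = abi.clobbers_reg_p (mode, regno);
```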