about summary refs log tree commit diff
path: root/gcc/sel-sched.c
diff options
context:
space:
mode:
author    Andrey Belevantsev <abel@ispras.ru>    2018-04-09 12:08:28 +0300
committer Andrey Belevantsev <abel@gcc.gnu.org> 2018-04-09 12:08:28 +0300
commit33bacbcba534374b3e2c7693ae9147b96540f43f (patch)
tree076894db443b34e02d669dd373fb107c98aff440 /gcc/sel-sched.c
parenta0873952aa3499abd6f7b49cf9f876397c3b631b (diff)
downloadgcc-33bacbcba534374b3e2c7693ae9147b96540f43f.zip
gcc-33bacbcba534374b3e2c7693ae9147b96540f43f.tar.gz
gcc-33bacbcba534374b3e2c7693ae9147b96540f43f.tar.bz2
re PR rtl-optimization/83530 (ICE in reset_sched_cycles_in_current_ebb, at sel-sched.c:7150)
PR rtl-optimization/83530
	* sel-sched.c (force_next_insn): New global variable.
	(remove_insn_for_debug): When force_next_insn is true, also leave
	only next insn in the ready list.
	(sel_sched_region): When the region wasn't scheduled, make
	another pass over it with force_next_insn set to 1.

	* gcc.dg/pr83530.c: New test.

From-SVN: r259228
Diffstat (limited to 'gcc/sel-sched.c')
-rw-r--r--	gcc/sel-sched.c	| 18
1 file changed, 14 insertions(+), 4 deletions(-)
diff --git a/gcc/sel-sched.c b/gcc/sel-sched.c
index 76092f9..cd29df3 100644
--- a/gcc/sel-sched.c
+++ b/gcc/sel-sched.c
@@ -5004,12 +5004,16 @@ remove_temp_moveop_nops (bool full_tidying)
distinguishing between bookkeeping copies and original insns. */
static int max_uid_before_move_op = 0;
+/* When true, we're always scheduling next insn on the already scheduled code
+ to get the right insn data for the following bundling or other passes. */
+static int force_next_insn = 0;
+
/* Remove from AV_VLIW_P all instructions but next when debug counter
tells us so. Next instruction is fetched from BNDS. */
static void
remove_insns_for_debug (blist_t bnds, av_set_t *av_vliw_p)
{
- if (! dbg_cnt (sel_sched_insn_cnt))
+ if (! dbg_cnt (sel_sched_insn_cnt) || force_next_insn)
/* Leave only the next insn in av_vliw. */
{
av_set_iterator av_it;
@@ -7641,9 +7645,15 @@ sel_sched_region (int rgn)
if (schedule_p)
sel_sched_region_1 ();
else
- /* Force initialization of INSN_SCHED_CYCLEs for correct bundling. */
- reset_sched_cycles_p = true;
-
+ {
+ /* Schedule always selecting the next insn to make the correct data
+ for bundling or other later passes. */
+ pipelining_p = false;
+ force_next_insn = 1;
+ sel_sched_region_1 ();
+ force_next_insn = 0;
+ }
+ reset_sched_cycles_p = pipelining_p;
sel_region_finish (reset_sched_cycles_p);
}