Commit d7f672ec, authored and committed by Andrey Belevantsev

re PR rtl-optimization/45352 (ICE: in reset_sched_cycles_in_current_ebb, at sel-sched.c:7058)

	PR rtl-optimization/45352
	PR rtl-optimization/46521
	PR rtl-optimization/46522
	* sel-sched.c (reset_sched_cycles_in_current_ebb): Recheck the DFA state
	on the last iteration of the advancing loop.
	(sel_sched_region_1): Propagate the rescheduling bit to the next block
	also for empty blocks.

	* gcc.dg/pr46521.c: New.
	* gcc.dg/pr46522.c: New.

From-SVN: r168164
gcc/ChangeLog:

2010-12-22  Andrey Belevantsev  <abel@ispras.ru>

	PR rtl-optimization/45352
	PR rtl-optimization/46521
	PR rtl-optimization/46522
	* sel-sched.c (reset_sched_cycles_in_current_ebb): Recheck the DFA state
	on the last iteration of the advancing loop.
	(sel_sched_region_1): Propagate the rescheduling bit to the next block
	also for empty blocks.
gcc/sel-sched.c:

@@ -7053,7 +7053,17 @@ reset_sched_cycles_in_current_ebb (void)
 		  && haifa_cost > 0
 		  && estimate_insn_cost (insn, curr_state) == 0)
 		break;
-	    }
+
+	      /* When the data dependency stall is longer than the DFA stall,
+		 it could be that after the longer stall the insn will again
+		 become unavailable to the DFA restrictions.  Looks strange
+		 but happens e.g. on x86-64.  So recheck DFA on the last
+		 iteration.  */
+	      if (after_stall
+		  && real_insn
+		  && haifa_cost == 0)
+		haifa_cost = estimate_insn_cost (insn, curr_state);
+	    }
 
 	  haifa_clock += i;
 	  if (sched_verbose >= 2)
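
To make the first hunk concrete, here is a minimal, self-contained sketch of the advancing loop with the recheck applied. This is not GCC code: toy_state, toy_insn_cost and toy_advance_state are hypothetical stand-ins for the scheduler's curr_state, estimate_insn_cost and advance_state, and the model insn is simply declared issueable only on even cycles, so a data-dependency stall that ends on an odd cycle leaves the insn unavailable to the "DFA" again and forces one more advance.

/* Minimal sketch of the advancing loop above; not GCC code.  */
#include <stdbool.h>
#include <stdio.h>

struct toy_state { int cycle; };

/* Hypothetical cost model: the insn can only issue on even cycles.  */
static int
toy_insn_cost (const struct toy_state *s)
{
  return (s->cycle % 2) ? 1 : 0;
}

static void
toy_advance_state (struct toy_state *s)
{
  s->cycle++;
}

/* Advance the state by HAIFA_COST cycles; with AFTER_STALL set, recheck
   the cost when the requested cycles run out, as the patch does.  */
static int
toy_advance (struct toy_state *s, int haifa_cost, bool after_stall)
{
  int i = 0;

  while (haifa_cost--)
    {
      toy_advance_state (s);
      i++;

      /* The fix: on the last planned iteration, ask the "DFA" again;
         it may report a fresh stall that still has to be consumed.  */
      if (after_stall && haifa_cost == 0)
        haifa_cost = toy_insn_cost (s);
    }

  return i;
}

int
main (void)
{
  struct toy_state s = { 0 };

  /* A 3-cycle data-dependency stall ends on cycle 3 (odd), so one extra
     advance is needed before the insn is really issueable: prints 4.  */
  printf ("advanced %d cycles\n", toy_advance (&s, 3, true));
  return 0;
}

Without the final recheck the sketch would stop after the three requested cycles with the insn still not issueable, which mirrors the kind of DFA/clock inconsistency behind the ICE in reset_sched_cycles_in_current_ebb reported in PR 45352.
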
@@ -7504,21 +7514,23 @@ sel_sched_region_1 (void)
 	    {
 	      basic_block bb = EBB_FIRST_BB (i);
 
-	      if (sel_bb_empty_p (bb))
-		{
-		  bitmap_clear_bit (blocks_to_reschedule, bb->index);
-		  continue;
-		}
-
 	      if (bitmap_bit_p (blocks_to_reschedule, bb->index))
 		{
+		  if (! bb_ends_ebb_p (bb))
+		    bitmap_set_bit (blocks_to_reschedule, bb_next_bb (bb)->index);
+		  if (sel_bb_empty_p (bb))
+		    {
+		      bitmap_clear_bit (blocks_to_reschedule, bb->index);
+		      continue;
+		    }
 		  clear_outdated_rtx_info (bb);
 		  if (sel_insn_is_speculation_check (BB_END (bb))
 		      && JUMP_P (BB_END (bb)))
 		    bitmap_set_bit (blocks_to_reschedule,
 				    BRANCH_EDGE (bb)->dest->index);
 		}
-	      else if (INSN_SCHED_TIMES (sel_bb_head (bb)) <= 0)
+	      else if (! sel_bb_empty_p (bb)
+		       && INSN_SCHED_TIMES (sel_bb_head (bb)) <= 0)
 		bitmap_set_bit (blocks_to_reschedule, bb->index);
 	    }
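
The second hunk can be illustrated the same way with a small standalone model, again not GCC code: toy_block with its empty and ends_ebb flags, and the reschedule array, are hypothetical stand-ins for basic_block, sel_bb_empty_p (), bb_ends_ebb_p () and the blocks_to_reschedule bitmap. The point is that the rescheduling bit must be forwarded to the next block of the EBB before an empty block is skipped; otherwise the bit is dropped, as it was before the fix.

/* Minimal sketch of propagating the rescheduling bit; not GCC code.  */
#include <stdbool.h>
#include <stdio.h>

#define NBLOCKS 4

struct toy_block
{
  bool empty;     /* Block contains no real insns.  */
  bool ends_ebb;  /* Last block of its extended basic block.  */
};

static void
propagate_reschedule (const struct toy_block *bb, bool *reschedule)
{
  for (int i = 0; i < NBLOCKS; i++)
    {
      if (!reschedule[i])
        continue;

      /* As in the patch: forward the bit first, so an empty block no
         longer swallows it.  */
      if (!bb[i].ends_ebb && i + 1 < NBLOCKS)
        reschedule[i + 1] = true;

      if (bb[i].empty)
        {
          reschedule[i] = false;  /* Nothing to reschedule here...  */
          continue;               /* ...but the successor keeps the bit.  */
        }

      /* A real block would be cleaned up and rescheduled at this point.  */
      printf ("rescheduling block %d\n", i);
    }
}

int
main (void)
{
  /* Block 1 is empty and sits in the middle of an EBB spanning blocks 0-2.  */
  struct toy_block bb[NBLOCKS] = {
    { false, false }, { true, false }, { false, true }, { false, true }
  };
  bool reschedule[NBLOCKS] = { true, false, false, false };

  propagate_reschedule (bb, reschedule);
  return 0;
}

With the forwarding done before the empty-block check, blocks 0 and 2 are both rescheduled even though the bit reaches block 2 only through the empty block 1; doing the empty-block check first, as the old code did, silently drops the bit, which is what the second ChangeLog entry fixes.
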
gcc/testsuite/ChangeLog:

2010-12-22  Andrey Belevantsev  <abel@ispras.ru>

	PR rtl-optimization/45352
	PR rtl-optimization/46521
	PR rtl-optimization/46522
	* gcc.dg/pr46521.c: New.
	* gcc.dg/pr46522.c: New.
gcc/testsuite/gcc.dg/pr46521.c:

/* { dg-do compile { target powerpc*-*-* ia64-*-* x86_64-*-* } } */
/* { dg-options "-Os -fselective-scheduling2 -fsel-sched-pipelining -fprofile-generate -fno-early-inlining" } */
static void bmp_iter_next (int *bi)
{
*bi >>= 1;
}
int bmp_iter_set (int *, int);
void bitmap_clear (void);
void bitmap_initialize_stat (void);
void df_md_alloc (int bi, int bb_index, int bb_info)
{
for (; bmp_iter_set (&bi, bb_index); bmp_iter_next (&bi))
if (bb_info)
bitmap_clear ();
else
bitmap_initialize_stat ();
}
gcc/testsuite/gcc.dg/pr46522.c:

/* { dg-do compile { target powerpc*-*-* ia64-*-* x86_64-*-* } } */
/* { dg-options "-O3 -fkeep-inline-functions -fsel-sched-pipelining -fselective-scheduling2 -funroll-loops" } */
struct S
{
unsigned i, j;
};
static inline void
bar (struct S *s)
{
if (s->i++ == 1)
{
s->i = 0;
s->j++;
}
}
void
foo1 (struct S *s)
{
bar (s);
}
void
foo2 (struct S s1, struct S s2, int i)
{
while (s1.i != s2.i) {
if (i)
*(unsigned *) 0 |= (1U << s1.i);
bar (&s1);
}
}