Commit 01f62f01 by Jan Hubicka, committed by Jan Hubicka

rtl.h (cleanup_barriers): Declare.

	* rtl.h (cleanup_barriers): Declare.
	* jump.c (cleanup_barriers): New function.
	* toplev.c (rest_of_compilation): Call cleanup_barriers
	before loop optimizer and after bb_reorder.

	* flow.c (back_edge_of_syntactic_loop_p): New.
	(split_edge): Use it.

From-SVN: r44409
parent aa069f77
ChangeLog:

Thu Jul 26 22:30:22 CEST 2001  Jan Hubicka  <jh@suse.cz>

	* rtl.h (cleanup_barriers): Declare.
	* jump.c (cleanup_barriers): New function.
	* toplev.c (rest_of_compilation): Call cleanup_barriers
	before loop optimizer and after bb_reorder.

	* flow.c (back_edge_of_syntactic_loop_p): New.
	(split_edge): Use it.

2001-07-26  Rainer Orth  <ro@TechFak.Uni-Bielefeld.DE>

	* glimits.h (_MACH_MACHLIMITS_H_): Delete.
flow.c:
@@ -482,6 +482,7 @@ static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
static void delete_dead_jumptables PARAMS ((void));
static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
@@ -1968,6 +1969,30 @@ redirect_edge_and_branch_force (e, target)
  return new_bb;
}

/* Helper function for split_edge.  Return true in case edge BB2 to BB1
   is back edge of syntactic loop.  */

static bool
back_edge_of_syntactic_loop_p (bb1, bb2)
     basic_block bb1, bb2;
{
  rtx insn;
  int count = 0;

  if (bb1->index > bb2->index)
    return false;

  if (bb1->index == bb2->index)
    return true;

  for (insn = bb1->end; insn != bb2->head && count >= 0;
       insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
          count++;
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
          count--;
      }

  return count >= 0;
}

/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

@@ -2115,7 +2140,8 @@ split_edge (edge_in)
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
      && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
                                PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
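
As an illustration (not part of the committed change): the helper keeps a running
balance of loop notes between the two blocks, counting NOTE_INSN_LOOP_BEG as +1
and NOTE_INSN_LOOP_END as -1, and treats the edge as a back edge of a syntactic
loop as long as that balance never drops below zero.  In split_edge this keeps
the new block's NOTE_INSN_BASIC_BLOCK from being emitted ahead of a
NOTE_INSN_LOOP_BEG when the edge being split is the loop's own back edge.  A
minimal standalone sketch of just that balance test (the toy_back_edge_p name
and the integer encoding are invented here, not GCC API):

#include <stdio.h>

/* Toy model: the insns between bb1->end and bb2->head are reduced to
   +1 (a LOOP_BEG note), -1 (a LOOP_END note) or 0 (anything else).  */
static int
toy_back_edge_p (const int *notes, int n)
{
  int count = 0, i;

  for (i = 0; i < n && count >= 0; i++)
    count += notes[i];

  return count >= 0;
}

int
main (void)
{
  int stays_inside[] = { 1, 0, -1 };   /* LOOP_BEG ... LOOP_END  */
  int leaves_loop[]  = { -1, 0, 1 };   /* a LOOP_END comes first */

  printf ("stays_inside: %d\n", toy_back_edge_p (stays_inside, 3));  /* 1 */
  printf ("leaves_loop:  %d\n", toy_back_edge_p (leaves_loop, 3));   /* 0 */
  return 0;
}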
jump.c:
@@ -100,6 +100,32 @@ rebuild_jump_labels (f)
      LABEL_NUSES (XEXP (insn, 0))++;
}

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
void
cleanup_barriers ()
{
  rtx insn, next, prev;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == BARRIER)
        {
          prev = prev_nonnote_insn (insn);
          if (GET_CODE (prev) == BARRIER)
            delete_barrier (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns (insn, insn, prev);
        }
    }
}

void
copy_loop_headers (f)
     rtx f;
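
As a rough intuition for the cleanup_barriers pass above, here is an invented
standalone toy (not GCC code): 'J' stands for a non-fallthru jump, 'B' for a
BARRIER, 'N' for a NOTE.  A barrier separated from the previous real insn by
notes is moved up next to it, and a barrier that already follows another
barrier is dropped:

#include <stdio.h>
#include <string.h>

/* Toy insn stream.  Mirrors the delete_barrier / reorder_insns calls
   above: duplicate barriers are removed, and a barrier is moved so it
   directly follows the previous non-note element.  */
static void
toy_cleanup_barriers (const char *in, char *out)
{
  int len = 0, i, p;

  for (i = 0; in[i]; i++)
    {
      if (in[i] != 'B')
        {
          out[len++] = in[i];
          continue;
        }

      /* Previous non-note element already emitted.  */
      p = len - 1;
      while (p >= 0 && out[p] == 'N')
        p--;

      if (p >= 0 && out[p] == 'B')
        continue;                      /* duplicate barrier: drop it  */

      /* Place the barrier right behind that element; intervening
         notes end up after the barrier.  */
      memmove (out + p + 2, out + p + 1, len - (p + 1));
      out[p + 1] = 'B';
      len++;
    }

  out[len] = '\0';
}

int
main (void)
{
  char buf[32];

  toy_cleanup_barriers ("JNNB", buf);
  printf ("JNNB -> %s\n", buf);        /* JBNN: barrier moved up   */

  toy_cleanup_barriers ("JBNB", buf);
  printf ("JBNB -> %s\n", buf);        /* JBN:  duplicate dropped  */

  return 0;
}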
rtl.h:
@@ -1286,6 +1286,7 @@ extern enum rtx_code swap_condition PARAMS ((enum rtx_code));
extern enum rtx_code unsigned_condition PARAMS ((enum rtx_code));
extern enum rtx_code signed_condition PARAMS ((enum rtx_code));
extern void mark_jump_label PARAMS ((rtx, rtx, int));
extern void cleanup_barriers PARAMS ((void));

/* In jump.c */
extern rtx squeeze_notes PARAMS ((rtx, rtx));
toplev.c:
@@ -3096,6 +3096,8 @@ rest_of_compilation (decl)
  if (flag_rerun_loop_opt)
    {
      cleanup_barriers ();

      /* We only want to perform unrolling once.  */
      loop_optimize (insns, rtl_dump_file, 0);

@@ -3110,6 +3112,7 @@ rest_of_compilation (decl)
         analysis code depends on this information.  */
      reg_scan (insns, max_reg_num (), 1);
    }

  cleanup_barriers ();
  loop_optimize (insns, rtl_dump_file,
                 (flag_unroll_loops ? LOOP_UNROLL : 0) | LOOP_BCT);

@@ -3614,6 +3617,7 @@ rest_of_compilation (decl)
  /* CFG no longer kept up to date.  */

  purge_line_number_notes (insns);
  cleanup_barriers ();

  /* If a scheduling pass for delayed branches is to be done,
     call the scheduling code.  */