Commit d25aa7ab by Paolo Bonzini, committed by Paolo Bonzini

bb-reorder.c (partition_hot_cold_basic_blocks): Do not enter/exit cfglayout mode.

2009-03-30  Paolo Bonzini  <bonzini@gnu.org>

	* bb-reorder.c (partition_hot_cold_basic_blocks): Do not
	enter/exit cfglayout mode.
	(pass_partition_block): Require it.
	* combine.c (find_single_use, reg_dead_at_p): Use CFG.
	(combine_instructions): Track basic blocks instead of labels.
	(update_cfg_for_uncondjump): New.
	(try_combine): Use it.  Update jumps after rescanning.
	(pass_combine): Require PROP_cfglayout.
	* passes.c (pass_outof_cfg_layout_mode): Move after regmove.

From-SVN: r145283
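
The combine.c part of this change replaces the old label-driven label_tick bookkeeping with one keyed on basic-block indices: a block starts a new extended basic block unless it is the single successor of the block with the previous index. Below is a minimal standalone sketch of that test; the struct and helper are simplified stand-ins for GCC's basic_block, single_pred_p and single_pred, not code from the patch.

/* Standalone sketch (not GCC code) of the extended-basic-block boundary
   test added to combine_instructions:
   !single_pred_p (bb) || single_pred (bb)->index != bb->index - 1.  */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct bb
{
  int index;            /* position in the block ordering */
  struct bb **preds;    /* predecessor blocks */
  size_t n_preds;       /* number of predecessors */
};

static bool
starts_new_ebb (const struct bb *bb)
{
  /* A new EBB starts unless the block has exactly one predecessor
     and that predecessor immediately precedes it in the ordering.  */
  return bb->n_preds != 1 || bb->preds[0]->index != bb->index - 1;
}

int
main (void)
{
  struct bb b0 = { 2, NULL, 0 };
  struct bb *pred[] = { &b0 };
  struct bb b1 = { 3, pred, 1 };   /* single predecessor, consecutive index */
  struct bb b2 = { 5, pred, 1 };   /* single predecessor, index not consecutive */

  printf ("%d %d %d\n", starts_new_ebb (&b0), starts_new_ebb (&b1),
	  starts_new_ebb (&b2));   /* prints: 1 0 1 */
  return 0;
}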
parent 9c9e26f5
gcc/ChangeLog:

+2009-03-30  Paolo Bonzini  <bonzini@gnu.org>
+
+	* bb-reorder.c (partition_hot_cold_basic_blocks): Do not
+	enter/exit cfglayout mode.
+	(pass_partition_block): Require it.
+	* combine.c (find_single_use, reg_dead_at_p): Use CFG.
+	(combine_instructions): Track basic blocks instead of labels.
+	(update_cfg_for_uncondjump): New.
+	(try_combine): Use it.  Update jumps after rescanning.
+	(pass_combine): Require PROP_cfglayout.
+	* passes.c (pass_outof_cfg_layout_mode): Move after regmove.
+
 2009-03-30  Paolo Bonzini  <bonzini@gnu.org>
 
 	* cfglayout.c (pass_into_cfg_layout_mode, pass_outof_cfg_layout_mode):
 	Provide/destroy PROP_cfglayout respectively.
 	* gcse.c (pass_jump_bypass, pass_gcse): Require it.
gcc/bb-reorder.c

@@ -2177,7 +2177,6 @@ struct rtl_opt_pass pass_duplicate_computed_gotos =
 static void
 partition_hot_cold_basic_blocks (void)
 {
-  basic_block cur_bb;
   edge *crossing_edges;
   int n_crossing_edges;
   int max_edges = 2 * last_basic_block;
@@ -2187,13 +2186,6 @@ partition_hot_cold_basic_blocks (void)
   crossing_edges = XCNEWVEC (edge, max_edges);
 
-  cfg_layout_initialize (0);
-
-  FOR_EACH_BB (cur_bb)
-    if (cur_bb->index >= NUM_FIXED_BLOCKS
-	&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
-      cur_bb->aux = cur_bb->next_bb;
-
   find_rarely_executed_basic_blocks_and_crossing_edges (&crossing_edges,
							 &n_crossing_edges,
							 &max_edges);
@@ -2202,8 +2194,6 @@ partition_hot_cold_basic_blocks (void)
     fix_edges_for_rarely_executed_code (crossing_edges, n_crossing_edges);
 
   free (crossing_edges);
-
-  cfg_layout_finalize ();
 }
 
 static bool
@@ -2300,7 +2290,7 @@ struct rtl_opt_pass pass_partition_blocks =
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
   TV_REORDER_BLOCKS,                    /* tv_id */
-  0,                                    /* properties_required */
+  PROP_cfglayout,                       /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
gcc/combine.c

@@ -605,6 +605,7 @@ find_single_use_1 (rtx dest, rtx *loc)
 static rtx *
 find_single_use (rtx dest, rtx insn, rtx *ploc)
 {
+  basic_block bb;
   rtx next;
   rtx *result;
   rtx link;
@@ -627,9 +628,10 @@ find_single_use (rtx dest, rtx insn, rtx *ploc)
   if (!REG_P (dest))
     return 0;
 
-  for (next = next_nonnote_insn (insn);
-       next != 0 && !LABEL_P (next);
-       next = next_nonnote_insn (next))
+  bb = BLOCK_FOR_INSN (insn);
+  for (next = NEXT_INSN (insn);
+       next && BLOCK_FOR_INSN (next) == bb;
+       next = NEXT_INSN (next))
     if (INSN_P (next) && dead_or_set_p (next, dest))
       {
 	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
@@ -1062,17 +1064,19 @@ combine_instructions (rtx f, unsigned int nregs)
      Also set any known values so that we can use it while searching
      for what bits are known to be set.  */
 
-  label_tick = label_tick_ebb_start = 1;
-
   setup_incoming_promotions (first);
 
   create_log_links ();
+  label_tick_ebb_start = ENTRY_BLOCK_PTR->index;
   FOR_EACH_BB (this_basic_block)
     {
       optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
       last_call_luid = 0;
       mem_last_set = -1;
-      label_tick++;
+      label_tick = this_basic_block->index;
+      if (!single_pred_p (this_basic_block)
+	  || single_pred (this_basic_block)->index != label_tick - 1)
+	label_tick_ebb_start = label_tick;
       FOR_BB_INSNS (this_basic_block, insn)
 	if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
 	  {
@@ -1098,15 +1102,13 @@ combine_instructions (rtx f, unsigned int nregs)
 	      fprintf(dump_file, "insn_cost %d: %d\n",
 		    INSN_UID (insn), INSN_COST (insn));
 	  }
-	else if (LABEL_P (insn))
-	  label_tick_ebb_start = label_tick;
     }
 
   nonzero_sign_valid = 1;
 
   /* Now scan all the insns in forward order.  */
 
-  label_tick = label_tick_ebb_start = 1;
+  label_tick_ebb_start = ENTRY_BLOCK_PTR->index;
   init_reg_last ();
   setup_incoming_promotions (first);
@@ -1115,7 +1117,10 @@ combine_instructions (rtx f, unsigned int nregs)
       optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
       last_call_luid = 0;
       mem_last_set = -1;
-      label_tick++;
+      label_tick = this_basic_block->index;
+      if (!single_pred_p (this_basic_block)
+	  || single_pred (this_basic_block)->index != label_tick - 1)
+	label_tick_ebb_start = label_tick;
       rtl_profile_for_bb (this_basic_block);
       for (insn = BB_HEAD (this_basic_block);
 	   insn != NEXT_INSN (BB_END (this_basic_block));
@@ -1268,8 +1273,6 @@ combine_instructions (rtx f, unsigned int nregs)
 	    retry:
 	      ;
 	    }
-	  else if (LABEL_P (insn))
-	    label_tick_ebb_start = label_tick;
 	}
     }
@@ -2159,6 +2162,25 @@ reg_subword_p (rtx x, rtx reg)
 }
 
+/* Delete the conditional jump INSN and adjust the CFG correspondingly.
+   Note that the INSN should be deleted *after* removing dead edges, so
+   that the kept edge is the fallthrough edge for a (set (pc) (pc))
+   but not for a (set (pc) (label_ref FOO)).  */
+
+static void
+update_cfg_for_uncondjump (rtx insn)
+{
+  basic_block bb = BLOCK_FOR_INSN (insn);
+
+  if (BB_END (bb) == insn)
+    purge_dead_edges (bb);
+
+  delete_insn (insn);
+  if (EDGE_COUNT (bb->succs) == 1)
+    single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
+}
+
 /* Try to combine the insns I1 and I2 into I3.
 
    Here I1 and I2 appear earlier than I3.
    I1 can be zero; then we combine just I2 into I3.
@@ -3712,41 +3734,6 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
       if (newi2pat)
 	note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
       note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
-
-      /* Set new_direct_jump_p if a new return or simple jump instruction
-	 has been created.
-
-	 If I3 is now an unconditional jump, ensure that it has a
-	 BARRIER following it since it may have initially been a
-	 conditional jump.  It may also be the last nonnote insn.  */
-
-      if (returnjump_p (i3) || any_uncondjump_p (i3))
-	{
-	  *new_direct_jump_p = 1;
-	  mark_jump_label (PATTERN (i3), i3, 0);
-
-	  if ((temp = next_nonnote_insn (i3)) == NULL_RTX
-	      || !BARRIER_P (temp))
-	    emit_barrier_after (i3);
-	}
-
-      if (undobuf.other_insn != NULL_RTX
-	  && (returnjump_p (undobuf.other_insn)
-	      || any_uncondjump_p (undobuf.other_insn)))
-	{
-	  *new_direct_jump_p = 1;
-
-	  if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
-	      || !BARRIER_P (temp))
-	    emit_barrier_after (undobuf.other_insn);
-	}
-
-      /* An NOOP jump does not need barrier, but it does need cleaning up
-	 of CFG.  */
-      if (GET_CODE (newpat) == SET
-	  && SET_SRC (newpat) == pc_rtx
-	  && SET_DEST (newpat) == pc_rtx)
-	*new_direct_jump_p = 1;
     }
 
   if (undobuf.other_insn != NULL_RTX)
@@ -3789,6 +3776,34 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
       df_insn_rescan (i3);
     }
 
+  /* Set new_direct_jump_p if a new return or simple jump instruction
+     has been created.  Adjust the CFG accordingly.  */
+  if (returnjump_p (i3) || any_uncondjump_p (i3))
+    {
+      *new_direct_jump_p = 1;
+      mark_jump_label (PATTERN (i3), i3, 0);
+      update_cfg_for_uncondjump (i3);
+    }
+
+  if (undobuf.other_insn != NULL_RTX
+      && (returnjump_p (undobuf.other_insn)
+	  || any_uncondjump_p (undobuf.other_insn)))
+    {
+      *new_direct_jump_p = 1;
+      update_cfg_for_uncondjump (undobuf.other_insn);
+    }
+
+  /* A noop might also need cleaning up of CFG, if it comes from the
+     simplification of a jump.  */
+  if (GET_CODE (newpat) == SET
+      && SET_SRC (newpat) == pc_rtx
+      && SET_DEST (newpat) == pc_rtx)
+    {
+      *new_direct_jump_p = 1;
+      update_cfg_for_uncondjump (i3);
+    }
+
   combine_successes++;
   undo_commit ();
@@ -11984,10 +11999,12 @@ reg_dead_at_p (rtx reg, rtx insn)
 	return 0;
     }
 
-  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
-     beginning of function.  */
-  for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
-       insn = prev_nonnote_insn (insn))
+  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
+     beginning of basic block.  */
+  block = BLOCK_FOR_INSN (insn);
+  for (;;)
     {
+      if (INSN_P (insn))
+	{
 	  note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
 	  if (reg_dead_flag)
@@ -11997,19 +12014,13 @@ reg_dead_at_p (rtx reg, rtx insn)
 	    return 1;
 	}
 
-  /* Get the basic block that we were in.  */
-  if (insn == 0)
-    block = ENTRY_BLOCK_PTR->next_bb;
-  else
-    {
-      FOR_EACH_BB (block)
       if (insn == BB_HEAD (block))
 	break;
 
-      if (block == EXIT_BLOCK_PTR)
-	return 0;
+      insn = PREV_INSN (insn);
     }
 
+  /* Look at live-in sets for the basic block that we were in.  */
   for (i = reg_dead_regno; i < reg_dead_endregno; i++)
     if (REGNO_REG_SET_P (df_get_live_in (block), i))
       return 0;
@@ -13025,7 +13036,7 @@ struct rtl_opt_pass pass_combine =
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
   TV_COMBINE,                           /* tv_id */
-  0,                                    /* properties_required */
+  PROP_cfglayout,                       /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
gcc/passes.c

@@ -758,12 +758,12 @@ init_optimization_passes (void)
       NEXT_PASS (pass_reginfo_init);
       NEXT_PASS (pass_inc_dec);
       NEXT_PASS (pass_initialize_regs);
-      NEXT_PASS (pass_outof_cfg_layout_mode);
       NEXT_PASS (pass_ud_rtl_dce);
       NEXT_PASS (pass_combine);
       NEXT_PASS (pass_if_after_combine);
       NEXT_PASS (pass_partition_blocks);
       NEXT_PASS (pass_regmove);
+      NEXT_PASS (pass_outof_cfg_layout_mode);
       NEXT_PASS (pass_split_all_insns);
       NEXT_PASS (pass_lower_subreg2);
       NEXT_PASS (pass_df_initialize_no_opt);
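
With combine guaranteed a valid CFG (PROP_cfglayout), find_single_use and reg_dead_at_p in this patch bound their insn scans by BLOCK_FOR_INSN instead of stopping at labels and barriers. The sketch below models that scanning pattern on a simplified insn list; the struct and function names are illustrative stand-ins, not GCC's API.

/* Standalone sketch (illustrative names, not GCC code): walk forward from
   START but give up as soon as the chain leaves START's basic block,
   mirroring the bound used by find_single_use (and, in the backward
   direction, by reg_dead_at_p).  */
#include <stddef.h>
#include <stdio.h>

struct insn
{
  int uid;
  int block;              /* stand-in for BLOCK_FOR_INSN */
  struct insn *next;
};

static struct insn *
scan_block_forward (struct insn *start, int wanted_uid)
{
  int bb = start->block;
  struct insn *i;

  /* Stop at the end of the chain or at the first insn of another block.  */
  for (i = start->next; i && i->block == bb; i = i->next)
    if (i->uid == wanted_uid)
      return i;
  return NULL;
}

int
main (void)
{
  struct insn i3 = { 3, 2, NULL };   /* first insn of the following block */
  struct insn i2 = { 2, 1, &i3 };
  struct insn i1 = { 1, 1, &i2 };

  printf ("%d\n", scan_block_forward (&i1, 2) != NULL);   /* 1: found in block */
  printf ("%d\n", scan_block_forward (&i1, 3) != NULL);   /* 0: next block, not scanned */
  return 0;
}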