Commit 63bb59a3, authored by Zdenek Dvorak, committed by Kazu Hirata

re PR tree-optimization/18601 (tree cfg cleanup is slow)

	PR tree-optimization/18601
	* tree-cfg.c (thread_jumps, thread_jumps_from_bb): Removed.
	(tree_forwarder_block_p): Do not consider blocks that are its own
	successors forwarders.
	(cleanup_forwarder_blocks, remove_forwarder_block): New functions.
	(cleanup_tree_cfg): Use cleanup_forwarder_blocks instead of
	thread_jumps.
	* tree-flow.h (bb_ann_d): Remove forwardable.

Co-Authored-By: Kazu Hirata <kazu@cs.umass.edu>

From-SVN: r91787
Parent: c4cfdac0
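
Note: this patch replaces the iterative jump-threading walk (thread_jumps
and thread_jumps_from_bb in the diff below, which repeatedly rescanned the
CFG and kept a per-block "forwardable" bit) with a single worklist pass,
cleanup_forwarder_blocks, that splices each forwarder block out by
redirecting its incoming edges to its lone successor.  The following is a
minimal, self-contained sketch of that worklist idea, not GCC code: struct
block, forwarder_p and remove_forwarder are invented for illustration, and
the PHI-node, abnormal-edge, label and dominator handling of the real
remove_forwarder_block is omitted.

  #include <stdbool.h>
  #include <stdio.h>

  #define MAX_PREDS 4

  struct block
  {
    int id;
    bool empty;                 /* No statements besides the final jump.  */
    struct block *succ;         /* Single successor, or NULL.  */
    struct block *preds[MAX_PREDS];
    int n_preds;
    bool removed;
  };

  /* Analogue of tree_forwarder_block_p: an empty block with exactly one
     successor that is not the block itself.  */

  static bool
  forwarder_p (struct block *bb)
  {
    return !bb->removed && bb->empty && bb->succ && bb->succ != bb;
  }

  /* Analogue of remove_forwarder_block: redirect every edge into BB to
     BB's successor, then delete BB.  Stale entries left in the pred
     lists of removed blocks are harmless for this demo.  */

  static void
  remove_forwarder (struct block *bb)
  {
    struct block *dest = bb->succ;
    int i;

    for (i = 0; i < bb->n_preds; i++)
      {
        struct block *pred = bb->preds[i];
        pred->succ = dest;
        dest->preds[dest->n_preds++] = pred;
      }
    bb->removed = true;
  }

  int
  main (void)
  {
    /* A chain a -> f1 -> f2 -> d in which f1 and f2 are forwarders.  */
    struct block a = { 0 }, f1 = { 0 }, f2 = { 0 }, d = { 0 };
    struct block *all[] = { &a, &f1, &f2, &d };
    struct block *worklist[4], **current = worklist;
    int i;

    a.id = 0; f1.id = 1; f2.id = 2; d.id = 3;
    f1.empty = f2.empty = true;
    a.succ = &f1; f1.succ = &f2; f2.succ = &d;
    f1.preds[f1.n_preds++] = &a;
    f2.preds[f2.n_preds++] = &f1;
    d.preds[d.n_preds++] = &f2;

    /* As in cleanup_forwarder_blocks: collect the forwarders once,
       then drain the worklist.  */
    for (i = 0; i < 4; i++)
      if (forwarder_p (all[i]))
        *current++ = all[i];
    while (current != worklist)
      remove_forwarder (*--current);

    printf ("a now jumps to block %d\n", a.succ->id);  /* Prints 3.  */
    return 0;
  }

In the real pass, remove_forwarder_block can also push a redirected
predecessor back onto the worklist when the redirection has just turned it
into a forwarder; the sketch does not need that refinement.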
gcc/ChangeLog
+2004-12-06  Zdenek Dvorak  <dvorakz@suse.cz>
+	    Kazu Hirata  <kazu@cs.umass.edu>
+
+	PR tree-optimization/18601
+	* tree-cfg.c (thread_jumps, thread_jumps_from_bb): Removed.
+	(tree_forwarder_block_p): Do not consider blocks that are their
+	own successors forwarders.
+	(cleanup_forwarder_blocks, remove_forwarder_block): New functions.
+	(cleanup_tree_cfg): Use cleanup_forwarder_blocks instead of
+	thread_jumps.
+	* tree-flow.h (bb_ann_d): Remove forwardable.
+
 2004-12-06  Kazu Hirata  <kazu@cs.umass.edu>
 
 	* expr.c (expand_expr_real_1): Remove an "if" whose condition
gcc/tree-cfg.c
@@ -119,7 +119,6 @@ static void split_critical_edges (void);
 static inline bool stmt_starts_bb_p (tree, tree);
 static int tree_verify_flow_info (void);
 static void tree_make_forwarder_block (edge);
-static bool thread_jumps (void);
 static bool tree_forwarder_block_p (basic_block);
 static void tree_cfg2vcg (FILE *);
@@ -133,6 +132,7 @@ static edge find_taken_edge_cond_expr (basic_block, tree);
 static edge find_taken_edge_switch_expr (basic_block, tree);
 static tree find_case_label_for_value (tree, tree);
 static bool phi_alternatives_equal (basic_block, edge, edge);
+static bool cleanup_forwarder_blocks (void);
 
 /*---------------------------------------------------------------------------
@@ -900,11 +900,12 @@ cleanup_tree_cfg (void)
   retval = cleanup_control_flow ();
   retval |= delete_unreachable_blocks ();
 
-  /* thread_jumps can redirect edges out of SWITCH_EXPRs, which can get
-     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
-     mappings around the call to thread_jumps.  */
+  /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
+     which can get expensive.  So we want to enable recording of edge
+     to CASE_LABEL_EXPR mappings around the call to
+     cleanup_forwarder_blocks.  */
   start_recording_case_labels ();
-  retval |= thread_jumps ();
+  retval |= cleanup_forwarder_blocks ();
   end_recording_case_labels ();
 
 #ifdef ENABLE_CHECKING
@@ -912,7 +913,7 @@ cleanup_tree_cfg (void)
     {
       gcc_assert (!cleanup_control_flow ());
       gcc_assert (!delete_unreachable_blocks ());
-      gcc_assert (!thread_jumps ());
+      gcc_assert (!cleanup_forwarder_blocks ());
     }
 #endif
@@ -3891,6 +3892,8 @@ tree_forwarder_block_p (basic_block bb)
       || phi_nodes (bb)
       /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
       || EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
+      /* Nor should this be an infinite loop.  */
+      || EDGE_SUCC (bb, 0)->dest == bb
       /* BB may not have an abnormal outgoing edge.  */
       || (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
     return false;
@@ -3923,282 +3926,182 @@ tree_forwarder_block_p (basic_block bb)
   return true;
 }
 
-/* Thread jumps from BB.  */
-
-static bool
-thread_jumps_from_bb (basic_block bb)
-{
-  edge_iterator ei;
-  edge e;
-  bool retval = false;
-
-  /* Examine each of our block's successors to see if it is
-     forwardable.  */
-  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
-    {
-      int freq;
-      gcov_type count;
-      edge last, old;
-      basic_block dest, tmp, curr, old_dest;
-      tree phi;
-
-      /* If the edge is abnormal or its destination is not
-	 forwardable, then there's nothing to do.  */
-      if ((e->flags & EDGE_ABNORMAL)
-	  || !bb_ann (e->dest)->forwardable)
-	{
-	  ei_next (&ei);
-	  continue;
-	}
-
-      /* Now walk through as many forwarder blocks as possible to find
-	 the ultimate destination we want to thread our jump to.  */
-      last = EDGE_SUCC (e->dest, 0);
-      bb_ann (e->dest)->forwardable = 0;
-      for (dest = EDGE_SUCC (e->dest, 0)->dest;
-	   bb_ann (dest)->forwardable;
-	   last = EDGE_SUCC (dest, 0),
-	   dest = EDGE_SUCC (dest, 0)->dest)
-	bb_ann (dest)->forwardable = 0;
-
-      /* Reset the forwardable marks to 1.  */
-      for (tmp = e->dest;
-	   tmp != dest;
-	   tmp = EDGE_SUCC (tmp, 0)->dest)
-	bb_ann (tmp)->forwardable = 1;
-
-      if (dest == e->dest)
-	{
-	  ei_next (&ei);
-	  continue;
-	}
-
-      old = find_edge (bb, dest);
-      if (old)
-	{
-	  /* If there already is an edge, check whether the values in
-	     phi nodes differ.  */
-	  if (!phi_alternatives_equal (dest, last, old))
-	    {
-	      /* The previous block is forwarder.  Redirect our jump
-		 to that target instead since we know it has no PHI
-		 nodes that will need updating.  */
-	      dest = last->src;
-
-	      /* That might mean that no forwarding at all is
-		 possible.  */
-	      if (dest == e->dest)
-		{
-		  ei_next (&ei);
-		  continue;
-		}
-
-	      old = find_edge (bb, dest);
-	    }
-	}
-
-      /* Perform the redirection.  */
-      retval = true;
-      count = e->count;
-      freq = EDGE_FREQUENCY (e);
-      old_dest = e->dest;
-      e = redirect_edge_and_branch (e, dest);
-
-      /* Update the profile.  */
-      if (profile_status != PROFILE_ABSENT)
-	for (curr = old_dest;
-	     curr != dest;
-	     curr = EDGE_SUCC (curr, 0)->dest)
-	  {
-	    curr->frequency -= freq;
-	    if (curr->frequency < 0)
-	      curr->frequency = 0;
-	    curr->count -= count;
-	    if (curr->count < 0)
-	      curr->count = 0;
-	    EDGE_SUCC (curr, 0)->count -= count;
-	    if (EDGE_SUCC (curr, 0)->count < 0)
-	      EDGE_SUCC (curr, 0)->count = 0;
-	  }
-
-      if (!old)
-	{
-	  /* Update PHI nodes.  We know that the new argument should
-	     have the same value as the argument associated with LAST.
-	     Otherwise we would have changed our target block
-	     above.  */
-	  int arg = last->dest_idx;
-
-	  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
-	    {
-	      tree def = PHI_ARG_DEF (phi, arg);
-	      gcc_assert (def != NULL_TREE);
-	      add_phi_arg (phi, def, e);
-	    }
-	}
-
-      /* Remove the unreachable blocks (observe that if all blocks
-	 were reachable before, only those in the path we threaded
-	 over and did not have any predecessor outside of the path
-	 become unreachable).  */
-      for (; old_dest != dest; old_dest = tmp)
-	{
-	  tmp = EDGE_SUCC (old_dest, 0)->dest;
-
-	  if (EDGE_COUNT (old_dest->preds) > 0)
-	    break;
-
-	  delete_basic_block (old_dest);
-	}
-
-      /* Update the dominators.  */
-      if (dom_info_available_p (CDI_DOMINATORS))
-	{
-	  /* If the dominator of the destination was in the
-	     path, set its dominator to the start of the
-	     redirected edge.  */
-	  if (get_immediate_dominator (CDI_DOMINATORS, old_dest) == NULL)
-	    set_immediate_dominator (CDI_DOMINATORS, old_dest, bb);
-
-	  /* Now proceed like if we forwarded just over one edge at a
-	     time.  Algorithm for forwarding edge S --> A over
-	     edge A --> B then is
-
-	     if (idom (B) == A
-		 && !dominated_by (S, B))
-	       idom (B) = idom (A);
-	     recount_idom (A);  */
-
-	  for (; old_dest != dest; old_dest = tmp)
-	    {
-	      basic_block dom;
-
-	      tmp = EDGE_SUCC (old_dest, 0)->dest;
-
-	      if (get_immediate_dominator (CDI_DOMINATORS, tmp) == old_dest
-		  && !dominated_by_p (CDI_DOMINATORS, bb, tmp))
-		{
-		  dom = get_immediate_dominator (CDI_DOMINATORS, old_dest);
-		  set_immediate_dominator (CDI_DOMINATORS, tmp, dom);
-		}
-
-	      dom = recount_dominator (CDI_DOMINATORS, old_dest);
-	      set_immediate_dominator (CDI_DOMINATORS, old_dest, dom);
-	    }
-	}
-    }
-
-  return retval;
-}
-
-/* Thread jumps over empty statements.
-
-   This code should _not_ thread over obviously equivalent conditions
-   as that requires nontrivial updates to the SSA graph.
-
-   As a precondition, we require that all basic blocks be reachable.
-   That is, there should be no opportunities left for
-   delete_unreachable_blocks.  */
-
-static bool
-thread_jumps (void)
-{
-  basic_block bb;
-  bool retval = false;
-  basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
-  basic_block *current = worklist;
-
-  FOR_EACH_BB (bb)
-    {
-      bb_ann (bb)->forwardable = tree_forwarder_block_p (bb);
-      bb->flags &= ~BB_VISITED;
-    }
-
-  /* We pretend to have ENTRY_BLOCK_PTR in WORKLIST.  This way,
-     ENTRY_BLOCK_PTR will never be entered into WORKLIST.  */
-  ENTRY_BLOCK_PTR->flags |= BB_VISITED;
-
-  /* Initialize WORKLIST by putting non-forwarder blocks that
-     immediately precede forwarder blocks because those are the ones
-     that we know we can thread jumps from.  We use BB_VISITED to
-     indicate whether a given basic block is in WORKLIST or not,
-     thereby avoiding duplicates in WORKLIST.  */
-  FOR_EACH_BB (bb)
-    {
-      edge_iterator ei;
-      edge e;
-
-      /* We are not interested in finding non-forwarder blocks
-	 directly.  We want to find non-forwarder blocks as
-	 predecessors of a forwarder block.  */
-      if (!bb_ann (bb)->forwardable)
-	continue;
-
-      /* Now we know BB is a forwarder block.  Visit each of its
-	 incoming edges and add to WORKLIST all non-forwarder blocks
-	 among BB's predecessors.  */
-      FOR_EACH_EDGE (e, ei, bb->preds)
-	{
-	  /* We don't want to put a duplicate into WORKLIST.  */
-	  if ((e->src->flags & BB_VISITED) == 0
-	      /* We are not interested in threading jumps from a forwarder
-		 block.  */
-	      && !bb_ann (e->src)->forwardable)
-	    {
-	      e->src->flags |= BB_VISITED;
-	      *current++ = e->src;
-	    }
-	}
-    }
-
-  /* Now let's drain WORKLIST.  */
-  while (worklist != current)
-    {
-      bb = *--current;
-
-      /* BB is no longer in WORKLIST, so clear BB_VISITED.  */
-      bb->flags &= ~BB_VISITED;
-
-      if (thread_jumps_from_bb (bb))
-	{
-	  retval = true;
-
-	  if (tree_forwarder_block_p (bb))
-	    {
-	      edge_iterator ej;
-	      edge f;
-
-	      bb_ann (bb)->forwardable = true;
-
-	      /* Attempts to thread through BB may have been blocked
		 because BB was not a forwarder block before.  Now
		 that BB is a forwarder block, we should revisit BB's
		 predecessors.  */
-	      FOR_EACH_EDGE (f, ej, bb->preds)
-		{
-		  /* We don't want to put a duplicate into WORKLIST.  */
-		  if ((f->src->flags & BB_VISITED) == 0
-		      /* We are not interested in threading jumps from a
			 forwarder block.  */
-		      && !bb_ann (f->src)->forwardable)
-		    {
-		      f->src->flags |= BB_VISITED;
-		      *current++ = f->src;
-		    }
-		}
-	    }
-	}
-    }
-
-  ENTRY_BLOCK_PTR->flags &= ~BB_VISITED;
-
-  free (worklist);
-
-  return retval;
-}
+/* Return true if BB has at least one abnormal incoming edge.  */
+
+static inline bool
+has_abnormal_incoming_edge_p (basic_block bb)
+{
+  edge e;
+  edge_iterator ei;
+
+  FOR_EACH_EDGE (e, ei, bb->preds)
+    if (e->flags & EDGE_ABNORMAL)
+      return true;
+
+  return false;
+}
+
+/* Removes forwarder block BB.  Returns false if this failed.  If new
+   forwarder block is created due to redirection of edges, it is
+   stored to worklist.  */
+
+static bool
+remove_forwarder_block (basic_block bb, basic_block **worklist)
+{
+  edge succ = EDGE_SUCC (bb, 0), e, s;
+  basic_block dest = succ->dest;
+  tree label;
+  tree phi;
+  edge_iterator ei;
+  block_stmt_iterator bsi, bsi_to;
+  bool seen_abnormal_edge = false;
+
+  /* We check for infinite loops already in tree_forwarder_block_p.
+     However it may happen that the infinite loop is created
+     afterwards due to removal of forwarders.  */
+  if (dest == bb)
+    return false;
+
+  /* If the destination block consists of an nonlocal label, do not merge
+     it.  */
+  label = first_stmt (bb);
+  if (label
+      && TREE_CODE (label) == LABEL_EXPR
+      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
+    return false;
+
+  /* If there is an abnormal edge to basic block BB, but not into
+     dest, problems might occur during removal of the phi node at out
+     of ssa due to overlapping live ranges of registers.
+
+     If there is an abnormal edge in DEST, the problems would occur
+     anyway since cleanup_dead_labels would then merge the labels for
+     two different eh regions, and rest of exception handling code
+     does not like it.
+
+     So if there is an abnormal edge to BB, proceed only if there is
+     no abnormal edge to DEST and there are no phi nodes in DEST.  */
+  if (has_abnormal_incoming_edge_p (bb))
+    {
+      seen_abnormal_edge = true;
+
+      if (has_abnormal_incoming_edge_p (dest)
+	  || phi_nodes (dest) != NULL_TREE)
+	return false;
+    }
+
+  /* If there are phi nodes in DEST, and some of the blocks that are
+     predecessors of BB are also predecessors of DEST, check that the
+     phi node arguments match.  */
+  if (phi_nodes (dest))
+    {
+      FOR_EACH_EDGE (e, ei, bb->preds)
+	{
+	  s = find_edge (e->src, dest);
+	  if (!s)
+	    continue;
+
+	  if (!phi_alternatives_equal (dest, succ, s))
+	    return false;
+	}
+    }
+
+  /* Redirect the edges.  */
+  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
+    {
+      if (e->flags & EDGE_ABNORMAL)
+	{
+	  /* If there is an abnormal edge, redirect it anyway, and
+	     move the labels to the new block to make it legal.  */
+	  s = redirect_edge_succ_nodup (e, dest);
+	}
+      else
+	s = redirect_edge_and_branch (e, dest);
+
+      if (s == e)
+	{
+	  /* Create arguments for the phi nodes, since the edge was not
+	     here before.  */
+	  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
+	    add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
+	}
+      else
+	{
+	  /* The source basic block might become a forwarder.  We know
+	     that it was not a forwarder before, since it used to have
+	     at least two outgoing edges, so we may just add it to
+	     worklist.  */
+	  if (tree_forwarder_block_p (s->src))
+	    *(*worklist)++ = s->src;
+	}
+    }
+
+  if (seen_abnormal_edge)
+    {
+      /* Move the labels to the new block, so that the redirection of
+	 the abnormal edges works.  */
+
+      bsi_to = bsi_start (dest);
+      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+	{
+	  label = bsi_stmt (bsi);
+	  gcc_assert (TREE_CODE (label) == LABEL_EXPR);
+	  bsi_remove (&bsi);
+	  bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
+	}
+    }
+
+  /* Update the dominators.  */
+  if (dom_info_available_p (CDI_DOMINATORS))
+    {
+      basic_block dom, dombb, domdest;
+
+      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
+      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
+      if (domdest == bb)
+	{
+	  /* Shortcut to avoid calling (relatively expensive)
+	     nearest_common_dominator unless necessary.  */
+	  dom = dombb;
+	}
+      else
+	dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
+
+      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
+    }
+
+  /* And kill the forwarder block.  */
+  delete_basic_block (bb);
+
+  return true;
+}
+
+/* Removes forwarder blocks.  */
+
+static bool
+cleanup_forwarder_blocks (void)
+{
+  basic_block bb;
+  bool changed = false;
+  basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
+  basic_block *current = worklist;
+
+  FOR_EACH_BB (bb)
+    {
+      if (tree_forwarder_block_p (bb))
+	*current++ = bb;
+    }
+
+  while (current != worklist)
+    {
+      bb = *--current;
+      changed |= remove_forwarder_block (bb, &current);
+    }
+
+  free (worklist);
+  return changed;
+}
 
 /* Return a non-special label in the head of basic block BLOCK.
    Create one if it doesn't exist.  */
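
Note on the dominator update in remove_forwarder_block above: once
forwarder BB is gone, the new immediate dominator of DEST is the nearest
common dominator of idom (DEST) and idom (BB), and when idom (DEST) == BB
it degenerates to idom (BB), which is the shortcut the patch takes.  The
following is a self-contained illustration, not GCC code; the parent-array
dominator tree and the helpers idom, depth and nca are invented here.

  #include <stdio.h>

  static int idom[5];           /* idom[v]; -1 marks the root.  */

  static int
  depth (int v)
  {
    int d = 0;

    while (idom[v] >= 0)
      {
        v = idom[v];
        d++;
      }
    return d;
  }

  /* Nearest common dominator: lift the deeper node until the two meet.  */

  static int
  nca (int a, int b)
  {
    while (a != b)
      {
        if (depth (a) >= depth (b))
          a = idom[a];
        else
          b = idom[b];
      }
    return a;
  }

  int
  main (void)
  {
    /* ENTRY = 0; blocks A = 1 and B = 2 both jump to forwarder F = 3,
       which jumps to D = 4.  */
    int bb = 3, dest = 4, dombb, domdest, dom;

    idom[0] = -1;
    idom[1] = 0;                /* idom (A) = ENTRY */
    idom[2] = 0;                /* idom (B) = ENTRY */
    idom[3] = 0;                /* idom (F) = ENTRY: both A and B reach it */
    idom[4] = 3;                /* idom (D) = F, its only predecessor */

    dombb = idom[bb];
    domdest = idom[dest];

    /* The update rule; the first arm is the shortcut from the patch.  */
    dom = domdest == bb ? dombb : nca (domdest, dombb);
    idom[dest] = dom;

    printf ("new idom (D) = %d\n", dom);  /* Prints 0 (ENTRY).  */
    return 0;
  }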
gcc/tree-flow.h
@@ -345,10 +345,6 @@ struct bb_ann_d GTY(())
   /* Chain of PHI nodes for this block.  */
   tree phi_nodes;
 
-  /* Nonzero if this block is forwardable during cfg cleanups.  This is also
-     used to detect loops during cfg cleanups.  */
-  unsigned forwardable : 1;
-
   /* Nonzero if this block contains an escape point (see is_escape_site).  */
   unsigned has_escape_site : 1;