Commit ea85edfe by Jakub Jelinek, committed by Michael Matz

re PR tree-optimization/51117 (rev.181172 causes glibc build failure)

	PR tree-optimization/51117
	* tree-eh.c (sink_clobbers): New function.
	(execute_lower_eh_dispatch): Call it for BBs ending with
	internally throwing RESX.
	* cfgexpand.c (add_scope_conflicts_1): Add all conflicts only
	at the first real instruction.

Co-Authored-By: Michael Matz <matz@suse.de>

From-SVN: r182283
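In GIMPLE terms, the new sink_clobbers transformation moves the trailing clobber statements of a landing-pad block into its single EH successor. A hand-written before/after sketch (dump syntax approximate, not taken from the PR testcase):

    ;; Before: the block only clears out-of-scope variables, then
    ;; rethrows internally to its EH successor.
    <bb 5>:
      x ={v} {CLOBBER};
      y ={v} {CLOBBER};
      resx 1;

    ;; After: the clobbers now sit at the head of the EH successor,
    ;; right after its labels, and <bb 5> is just the rethrow.
    <bb 5>:
      resx 1;

    <bb 6>:
      x ={v} {CLOBBER};
      y ={v} {CLOBBER};
      ...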
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -456,34 +456,14 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
   FOR_EACH_EDGE (e, ei, bb->preds)
     bitmap_ior_into (work, (bitmap)e->src->aux);
 
-  if (for_conflict)
-    {
-      /* We need to add conflicts for everything life at the start of
-         this block.  Unlike classical lifeness for named objects we can't
-         rely on seeing a def/use of the names we're interested in.
-         There might merely be indirect loads/stores.  We'd not add any
-         conflicts for such partitions.  */
-      bitmap_iterator bi;
-      unsigned i;
-      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
-        {
-          unsigned j;
-          bitmap_iterator bj;
-          EXECUTE_IF_SET_IN_BITMAP (work, i, j, bj)
-            add_stack_var_conflict (i, j);
-        }
-      visit = visit_conflict;
-    }
-  else
-    visit = visit_op;
+  visit = visit_op;
 
   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       gimple stmt = gsi_stmt (gsi);
-      if (!is_gimple_debug (stmt))
-        walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
+      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
     }
-  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       gimple stmt = gsi_stmt (gsi);
@@ -501,7 +481,29 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
              bitmap_clear_bit (work, *v);
            }
          else if (!is_gimple_debug (stmt))
-           walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
+           {
+             if (for_conflict
+                 && visit == visit_op)
+               {
+                 /* If this is the first real instruction in this BB we need
+                    to add conflicts for everything life at this point now.
+                    Unlike classical lifeness for named objects we can't
+                    rely on seeing a def/use of the names we're interested in.
+                    There might merely be indirect loads/stores.  We'd not add any
+                    conflicts for such partitions.  */
+                 bitmap_iterator bi;
+                 unsigned i;
+                 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
+                   {
+                     unsigned j;
+                     bitmap_iterator bj;
+                     EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
+                       add_stack_var_conflict (i, j);
+                   }
+                 visit = visit_conflict;
+               }
+             walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
+           }
        }
     }
 }
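The cfgexpand.c change defers the "everything live conflicts with everything live" recording from the block entry to the first real instruction, so partitions whose variables are clobbered at the top of the block (and cleared from WORK via bitmap_clear_bit above) no longer count as live. The pairwise recording itself stays the same nested-bitmap-iteration idiom, now starting the inner scan at i + 1 to skip redundant pairs. A minimal standalone sketch of that idiom, using a word-sized bitset and a boolean matrix in place of GCC's sparse bitmaps and add_stack_var_conflict (all names here are invented for illustration):

#include <stdbool.h>
#include <stdio.h>

#define NVARS 8

/* Stand-in for the per-variable conflict bitmaps in cfgexpand.c.  */
static bool conflict[NVARS][NVARS];

/* Record that partitions I and J cannot share a stack slot.  */
static void
add_conflict (unsigned i, unsigned j)
{
  conflict[i][j] = conflict[j][i] = true;
}

/* For every pair of bits set in WORK (the set of live partitions),
   record a conflict -- the analog of the nested
   EXECUTE_IF_SET_IN_BITMAP loops above.  Starting the inner scan at
   i + 1 skips the (i, i) self-pairs and the symmetric duplicates.  */
static void
add_conflicts_for_live (unsigned work)
{
  for (unsigned i = 0; i < NVARS; i++)
    if (work & (1u << i))
      for (unsigned j = i + 1; j < NVARS; j++)
        if (work & (1u << j))
          add_conflict (i, j);
}

int
main (void)
{
  add_conflicts_for_live (0x0d);  /* partitions 0, 2 and 3 live together */
  printf ("0-2:%d 2-3:%d 1-2:%d\n",
          conflict[0][2], conflict[2][3], conflict[1][2]);
  return 0;
}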
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -3194,6 +3194,76 @@ optimize_clobbers (basic_block bb)
     }
 }
 
+/* Try to sink var = {v} {CLOBBER} stmts followed just by
+   internal throw to successor BB.  */
+
+static int
+sink_clobbers (basic_block bb)
+{
+  edge e;
+  edge_iterator ei;
+  gimple_stmt_iterator gsi, dgsi;
+  basic_block succbb;
+  bool any_clobbers = false;
+
+  /* Only optimize if BB has a single EH successor and
+     all predecessor edges are EH too.  */
+  if (!single_succ_p (bb)
+      || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
+    return 0;
+
+  FOR_EACH_EDGE (e, ei, bb->preds)
+    {
+      if ((e->flags & EDGE_EH) == 0)
+        return 0;
+    }
+
+  /* And BB contains only CLOBBER stmts before the final
+     RESX.  */
+  gsi = gsi_last_bb (bb);
+  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
+    {
+      gimple stmt = gsi_stmt (gsi);
+      if (is_gimple_debug (stmt))
+        continue;
+      if (gimple_code (stmt) == GIMPLE_LABEL)
+        break;
+      if (!gimple_clobber_p (stmt)
+          || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
+        return 0;
+      any_clobbers = true;
+    }
+  if (!any_clobbers)
+    return 0;
+
+  succbb = single_succ (bb);
+  dgsi = gsi_after_labels (succbb);
+  gsi = gsi_last_bb (bb);
+  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
+    {
+      gimple stmt = gsi_stmt (gsi);
+      tree vdef;
+      if (is_gimple_debug (stmt))
+        continue;
+      if (gimple_code (stmt) == GIMPLE_LABEL)
+        break;
+      unlink_stmt_vdef (stmt);
+      gsi_remove (&gsi, false);
+      vdef = gimple_vdef (stmt);
+      if (vdef && TREE_CODE (vdef) == SSA_NAME)
+        {
+          vdef = SSA_NAME_VAR (vdef);
+          mark_sym_for_renaming (vdef);
+          gimple_set_vdef (stmt, vdef);
+          gimple_set_vuse (stmt, vdef);
+        }
+      release_defs (stmt);
+      gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
+    }
+  return TODO_update_ssa_only_virtuals;
+}
+
 /* At the end of inlining, we can lower EH_DISPATCH.  Return true when
    we have found some duplicate labels and removed some edges.  */
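Two details of sink_clobbers are worth noting: it returns a TODO_* mask (0 when it does nothing) rather than a bool, and it walks the block twice -- a read-only pass that rejects the block unless everything before the final RESX is a clobber, then a destructive pass that unlinks the statements and re-inserts them in the successor. A reduced sketch of that validate-then-mutate shape on a plain doubly linked list (all types and helpers here are invented for illustration, not GCC internals):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct stmt { struct stmt *prev, *next; bool is_clobber; };
struct block { struct stmt *first, *last; };  /* LAST plays the RESX role */

/* Move the statements sitting before BB's final statement to the front
   of DEST.  Returns false, leaving everything untouched, unless all of
   them are clobbers -- mirroring sink_clobbers' two passes.  */
static bool
sink_trailing_clobbers (struct block *bb, struct block *dest)
{
  /* Pass 1: validate only; no mutation on the failure path.  */
  for (struct stmt *s = bb->last->prev; s; s = s->prev)
    if (!s->is_clobber)
      return false;

  /* Pass 2: unlink each clobber and prepend it to DEST.  */
  struct stmt *s = bb->last->prev;
  while (s)
    {
      struct stmt *prev = s->prev;
      /* Splice S out of BB; everything between S and BB->last has
         already been moved, so S's right neighbor is BB->last.  */
      if (prev)
        prev->next = bb->last;
      else
        bb->first = bb->last;
      bb->last->prev = prev;
      /* Prepend S to DEST.  */
      s->prev = NULL;
      s->next = dest->first;
      if (dest->first)
        dest->first->prev = s;
      dest->first = s;
      s = prev;
    }
  return true;
}

int
main (void)
{
  struct stmt c1 = { NULL, NULL, true };
  struct stmt c2 = { NULL, NULL, true };
  struct stmt resx = { NULL, NULL, false };
  c1.next = &c2; c2.prev = &c1; c2.next = &resx; resx.prev = &c2;
  struct block bb = { &c1, &resx };
  struct block succ = { NULL, NULL };
  printf ("sunk: %d\n", (int) sink_trailing_clobbers (&bb, &succ));
  return 0;
}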
@@ -3349,7 +3419,7 @@ static unsigned
 execute_lower_eh_dispatch (void)
 {
   basic_block bb;
-  bool any_rewritten = false;
+  int flags = 0;
   bool redirected = false;
 
   assign_filter_values ();
@@ -3362,16 +3432,20 @@ execute_lower_eh_dispatch (void)
       if (gimple_code (last) == GIMPLE_EH_DISPATCH)
        {
          redirected |= lower_eh_dispatch (bb, last);
-         any_rewritten = true;
+         flags |= TODO_update_ssa_only_virtuals;
        }
-      else if (gimple_code (last) == GIMPLE_RESX
-              && stmt_can_throw_external (last))
-       optimize_clobbers (bb);
+      else if (gimple_code (last) == GIMPLE_RESX)
+       {
+         if (stmt_can_throw_external (last))
+           optimize_clobbers (bb);
+         else
+           flags |= sink_clobbers (bb);
+       }
     }
 
   if (redirected)
     delete_unreachable_blocks ();
-  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
+  return flags;
 }
 
 static bool
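With sink_clobbers returning a TODO_* mask, execute_lower_eh_dispatch switches from a single any_rewritten bool to an int accumulator, OR-ing in whatever each block's transformation requests and handing the union back to the pass manager. The shape of that pattern in miniature, with invented stand-in values for the TODO_* bits (the real ones live in tree-pass.h and differ):

#include <stdio.h>

/* Illustrative stand-ins only; not GCC's actual flag values.  */
#define TODO_update_ssa_only_virtuals (1 << 0)
#define TODO_cleanup_cfg              (1 << 1)

/* Each sub-transformation returns 0 if it changed nothing, or the
   cleanup work it wants the pass manager to do afterwards.  */
static int transform_block_a (void) { return TODO_update_ssa_only_virtuals; }
static int transform_block_b (void) { return 0; }

int
main (void)
{
  int flags = 0;  /* replaces `bool any_rewritten = false;' */
  flags |= transform_block_a ();
  flags |= transform_block_b ();
  printf ("todo mask: %#x\n", flags);  /* prints "todo mask: 0x1" */
  return 0;
}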