Commit fb768529 by Richard Biener, committed by Richard Biener

re PR tree-optimization/93199 (Compile time hog in sink_clobbers)

2020-01-08  Richard Biener  <rguenther@suse.de>

	PR middle-end/93199
	* tree-eh.c (sink_clobbers): Update virtual operands for
	the first and last stmt only.  Add a dry-run capability.
	(pass_lower_eh_dispatch::execute): Perform clobber sinking
	after CFG manipulations and in RPO order to catch all
	secondary opportunities reliably.

From-SVN: r280006
parent 949f0062

gcc/ChangeLog
+2020-01-08  Richard Biener  <rguenther@suse.de>
+
+	PR middle-end/93199
+	* tree-eh.c (sink_clobbers): Update virtual operands for
+	the first and last stmt only.  Add a dry-run capability.
+	(pass_lower_eh_dispatch::execute): Perform clobber sinking
+	after CFG manipulations and in RPO order to catch all
+	secondary opportunities reliably.
+
 2020-01-08  Georg-Johann Lay  <avr@gjlay.de>
 
 	PR target/93182
gcc/tree-eh.c
@@ -3550,10 +3550,11 @@ optimize_clobbers (basic_block bb)
 }
 
 /* Try to sink var = {v} {CLOBBER} stmts followed just by
-   internal throw to successor BB.  */
+   internal throw to successor BB.  If FOUND_OPPORTUNITY is not NULL
+   then do not perform the optimization but set *FOUND_OPPORTUNITY to true.  */
 
 static int
-sink_clobbers (basic_block bb)
+sink_clobbers (basic_block bb, bool *found_opportunity = NULL)
 {
   edge e;
   edge_iterator ei;
@@ -3591,13 +3592,19 @@ sink_clobbers (basic_block bb)
   if (!any_clobbers)
     return 0;
 
+  /* If this was a dry run, tell it we found clobbers to sink.  */
+  if (found_opportunity)
+    {
+      *found_opportunity = true;
+      return 0;
+    }
+
   edge succe = single_succ_edge (bb);
   succbb = succe->dest;
 
   /* See if there is a virtual PHI node to take an updated virtual
      operand from.  */
   gphi *vphi = NULL;
-  tree vuse = NULL_TREE;
   for (gphi_iterator gpi = gsi_start_phis (succbb);
        !gsi_end_p (gpi); gsi_next (&gpi))
     {
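
The hunk above is the dry-run mechanism in a nutshell: with a non-NULL FOUND_OPPORTUNITY the function only reports that clobbers could be sunk and changes nothing, so the caller can cheaply probe many blocks first and do the real sinking later. A minimal stand-alone sketch of that pattern, with hypothetical names and not GCC code:

  #include <algorithm>
  #include <cassert>
  #include <vector>

  /* Dry run via an optional out-parameter: with a non-null
     FOUND_OPPORTUNITY the function only reports whether there is work
     to do; otherwise it performs the transformation.  */
  static int
  remove_negatives (std::vector<int> &v, bool *found_opportunity = nullptr)
  {
    bool any = false;
    for (int x : v)
      if (x < 0)
        any = true;
    if (!any)
      return 0;

    if (found_opportunity)
      {
        /* Dry run: report the opportunity, modify nothing.  */
        *found_opportunity = true;
        return 0;
      }

    /* Real run: actually drop the negative elements.  */
    v.erase (std::remove_if (v.begin (), v.end (),
                             [] (int x) { return x < 0; }),
             v.end ());
    return 1;
  }

  int
  main ()
  {
    std::vector<int> v = {1, -2, 3};
    bool found = false;
    remove_negatives (v, &found);   /* dry run, v untouched */
    assert (found && v.size () == 3);
    remove_negatives (v);           /* real run */
    assert (v.size () == 2);
  }
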
@@ -3605,11 +3612,12 @@ sink_clobbers (basic_block bb)
       if (virtual_operand_p (res))
 	{
 	  vphi = gpi.phi ();
-	  vuse = res;
 	  break;
 	}
     }
 
+  gimple *first_sunk = NULL;
+  gimple *last_sunk = NULL;
   dgsi = gsi_after_labels (succbb);
   gsi = gsi_last_bb (bb);
   for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
@@ -3641,36 +3649,37 @@ sink_clobbers (basic_block bb)
 	 forwarder edge we can keep virtual operands in place.  */
       gsi_remove (&gsi, false);
       gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
-
-      /* But adjust virtual operands if we sunk across a PHI node.  */
-      if (vuse)
+      if (!first_sunk)
+	first_sunk = stmt;
+      last_sunk = stmt;
+    }
+  if (first_sunk)
+    {
+      /* Adjust virtual operands if we sunk across a virtual PHI.  */
+      if (vphi)
 	{
-	  gimple *use_stmt;
 	  imm_use_iterator iter;
 	  use_operand_p use_p;
-	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
+	  gimple *use_stmt;
+	  tree phi_def = gimple_phi_result (vphi);
+	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, phi_def)
 	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
-	      SET_USE (use_p, gimple_vdef (stmt));
-	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
+	      SET_USE (use_p, gimple_vdef (first_sunk));
+	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def))
 	    {
-	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
-	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
+	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (first_sunk)) = 1;
+	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def) = 0;
 	    }
-	  /* Adjust the incoming virtual operand.  */
-	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
-	  SET_USE (gimple_vuse_op (stmt), vuse);
+	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe),
+		   gimple_vuse (last_sunk));
+	  SET_USE (gimple_vuse_op (last_sunk), phi_def);
 	}
       /* If there isn't a single predecessor but no virtual PHI node
 	 arrange for virtual operands to be renamed.  */
-      else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
-	       && !single_pred_p (succbb))
+      else if (!single_pred_p (succbb)
+	       && TREE_CODE (gimple_vuse (last_sunk)) == SSA_NAME)
 	{
-	  /* In this case there will be no use of the VDEF of this stmt.
-	     ??? Unless this is a secondary opportunity and we have not
-	     removed unreachable blocks yet, so we cannot assert this.
-	     Which also means we will end up renaming too many times.  */
-	  SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
-	  mark_virtual_operands_for_renaming (cfun);
+	  mark_virtual_operand_for_renaming (gimple_vuse (last_sunk));
 	  todo |= TODO_update_ssa_only_virtuals;
 	}
     }
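
The first_sunk/last_sunk bookkeeping is what makes the fix cheap: the sunk clobbers keep their internal virtual use-def links, so only the link into the first sunk statement and the link out of the last one need rewiring, rather than updating every statement. This is roughly the same situation as splicing an already linked run of nodes into another list; a small illustrative sketch of that idea (an analogy, not GCC code):

  #include <cassert>

  /* Splicing an already linked run of nodes updates only the boundary
     links; everything between FIRST and LAST stays untouched.  */
  struct node
  {
    int val;
    node *next;
  };

  static void
  splice_after (node *pos, node *first, node *last)
  {
    last->next = pos->next;   /* fix the link out of the last node */
    pos->next = first;        /* fix the link into the first node */
  }

  int
  main ()
  {
    node c = {3, nullptr}, b = {2, &c}, a = {1, &b};   /* run a->b->c */
    node tail = {9, nullptr}, head = {0, &tail};       /* list head->tail */
    splice_after (&head, &a, &c);
    assert (head.next == &a && a.next == &b && c.next == &tail);
  }
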
@@ -3863,6 +3872,7 @@ pass_lower_eh_dispatch::execute (function *fun)
   basic_block bb;
   int flags = 0;
   bool redirected = false;
+  bool any_resx_to_process = false;
 
   assign_filter_values ();
@@ -3879,18 +3889,37 @@ pass_lower_eh_dispatch::execute (function *fun)
 	}
       else if (gimple_code (last) == GIMPLE_RESX)
 	{
-	  if (stmt_can_throw_external (cfun, last))
+	  if (stmt_can_throw_external (fun, last))
 	    optimize_clobbers (bb);
-	  else
-	    flags |= sink_clobbers (bb);
+	  else if (!any_resx_to_process)
+	    sink_clobbers (bb, &any_resx_to_process);
 	}
     }
 
   if (redirected)
     {
       free_dominance_info (CDI_DOMINATORS);
       delete_unreachable_blocks ();
     }
 
+  if (any_resx_to_process)
+    {
+      /* Make sure to catch all secondary sinking opportunities by processing
+	 blocks in RPO order and after all CFG modifications from lowering
+	 and unreachable block removal.  */
+      int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun));
+      int rpo_n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
+      for (int i = 0; i < rpo_n; ++i)
+	{
+	  bb = BASIC_BLOCK_FOR_FN (fun, rpo[i]);
+	  gimple *last = last_stmt (bb);
+	  if (last
+	      && gimple_code (last) == GIMPLE_RESX
+	      && !stmt_can_throw_external (fun, last))
+	    flags |= sink_clobbers (bb);
+	}
+      free (rpo);
+    }
+
   return flags;
 }
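
Doing the actual sinking in a separate reverse post-order walk means a block is reached only after its forward-edge predecessors, so clobbers that were just sunk into a block are seen again when that block itself is processed and can be sunk further; that is how the secondary opportunities are caught in a single pass. A minimal sketch of an RPO computation via plain DFS, standing in for pre_and_rev_post_order_compute_fn (hypothetical graph, not GCC code):

  #include <algorithm>
  #include <iostream>
  #include <vector>

  /* Record each node after all of its successors (post order), then
     reverse: in the resulting order every node precedes the nodes
     reachable from it over forward edges.  */
  static void
  dfs (int n, const std::vector<std::vector<int>> &succ,
       std::vector<bool> &seen, std::vector<int> &post)
  {
    seen[n] = true;
    for (int s : succ[n])
      if (!seen[s])
        dfs (s, succ, seen, post);
    post.push_back (n);
  }

  int
  main ()
  {
    /* 0 -> 1 -> 2 and 0 -> 2, with 0 as the entry block.  */
    std::vector<std::vector<int>> succ = { {1, 2}, {2}, {} };
    std::vector<bool> seen (succ.size (), false);
    std::vector<int> post;
    dfs (0, succ, seen, post);
    std::reverse (post.begin (), post.end ());
    for (int n : post)
      std::cout << n << ' ';    /* prints: 0 1 2 */
    std::cout << '\n';
  }
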