Commit 5de583cc by Richard Biener, committed by Richard Biener

tree-ssa-sccvn.h (vn_eliminate): Declare.

2017-10-25  Richard Biener  <rguenther@suse.de>

	* tree-ssa-sccvn.h (vn_eliminate): Declare.
	* tree-ssa-pre.c (class eliminate_dom_walker, eliminate,
	class pass_fre): Move to ...
	* tree-ssa-sccvn.c (class eliminate_dom_walker, vn_eliminate,
	class pass_fre): ... here and adjust for statistics.

From-SVN: r254074
parent a596f497
2017-10-25 Richard Biener <rguenther@suse.de>
* tree-ssa-sccvn.h (vn_eliminate): Declare.
* tree-ssa-pre.c (class eliminate_dom_walker, eliminate,
class pass_fre): Move to ...
* tree-ssa-sccvn.c (class eliminate_dom_walker, vn_eliminate,
class pass_fre): ... here and adjust for statistics.
2017-10-25 Jakub Jelinek <jakub@redhat.com>
PR libstdc++/81706
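The interface change is small: elimination is now entered through vn_eliminate, whose bitmap argument distinguishes the two callers visible in the diff below. The following is a standalone model of that calling convention only (illustration, not GCC code; the _model names are made up), showing how PRE passes its inserted-expressions bitmap while FRE passes NULL and thereby disables the PRE-specific behaviour:

// Standalone model of the calling convention introduced by this patch:
// PRE passes its bitmap of inserted SSA-name versions, FRE passes NULL,
// and that choice drives the walker's do_pre flag (see the new ctor below).
#include <cstdio>
#include <set>

typedef std::set<unsigned> bitmap_model;    // stand-in for GCC's bitmap

static unsigned
vn_eliminate_model (const bitmap_model *inserted_exprs)
{
  bool do_pre = inserted_exprs != nullptr;  // mirrors eliminate_dom_walker's new ctor
  std::printf ("eliminating, do_pre = %d\n", (int) do_pre);
  return 0;                                 // the real function returns a TODO_* mask
}

int main ()
{
  bitmap_model inserted = {5, 9};
  vn_eliminate_model (&inserted);   // as pass_pre::execute now calls vn_eliminate
  vn_eliminate_model (nullptr);     // as pass_fre::execute now calls vn_eliminate
}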
@@ -39,7 +39,6 @@ along with GCC; see the file COPYING3. If not see
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
@@ -50,9 +49,7 @@ along with GCC; see the file COPYING3. If not see
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "langhooks.h"
#include "alias.h"
/* Even though this file is called tree-ssa-pre.c, we actually
@@ -516,9 +513,6 @@ typedef struct bb_bitmap_sets
optimization PRE was able to perform. */
static struct
{
/* The number of RHS computations eliminated by PRE. */
int eliminations;
/* The number of new expressions/temporaries generated by PRE. */
int insertions;
@@ -4036,807 +4030,6 @@ compute_avail (void)
free (worklist);
}
class eliminate_dom_walker : public dom_walker
{
public:
eliminate_dom_walker (cdi_direction direction, bool do_pre_);
~eliminate_dom_walker ();
virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
tree eliminate_avail (tree op);
void eliminate_push_avail (tree op);
tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
bool do_pre;
unsigned int el_todo;
/* Blocks with statements that have had their EH properties changed. */
bitmap need_eh_cleanup;
/* Blocks with statements that have had their AB properties changed. */
bitmap need_ab_cleanup;
auto_vec<gimple *> to_remove;
auto_vec<gimple *> to_fixup;
auto_vec<tree> avail;
auto_vec<tree> avail_stack;
};
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
bool do_pre_)
: dom_walker (direction), do_pre (do_pre_), el_todo (0)
{
need_eh_cleanup = BITMAP_ALLOC (NULL);
need_ab_cleanup = BITMAP_ALLOC (NULL);
}
eliminate_dom_walker::~eliminate_dom_walker ()
{
BITMAP_FREE (need_eh_cleanup);
BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
eliminate domwalk. */
tree
eliminate_dom_walker::eliminate_avail (tree op)
{
tree valnum = VN_INFO (op)->valnum;
if (TREE_CODE (valnum) == SSA_NAME)
{
if (SSA_NAME_IS_DEFAULT_DEF (valnum))
return valnum;
if (avail.length () > SSA_NAME_VERSION (valnum))
return avail[SSA_NAME_VERSION (valnum)];
}
else if (is_gimple_min_invariant (valnum))
return valnum;
return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available. */
void
eliminate_dom_walker::eliminate_push_avail (tree op)
{
tree valnum = VN_INFO (op)->valnum;
if (TREE_CODE (valnum) == SSA_NAME)
{
if (avail.length () <= SSA_NAME_VERSION (valnum))
avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
tree pushop = op;
if (avail[SSA_NAME_VERSION (valnum)])
pushop = avail[SSA_NAME_VERSION (valnum)];
avail_stack.safe_push (pushop);
avail[SSA_NAME_VERSION (valnum)] = op;
}
}
/* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
the leader for the expression if insertion was successful. */
tree
eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
/* We can insert a sequence with a single assignment only. */
gimple_seq stmts = VN_INFO (val)->expr;
if (!gimple_seq_singleton_p (stmts))
return NULL_TREE;
gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
if (!stmt
|| (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
&& gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
&& gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
&& (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
|| TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
return NULL_TREE;
tree op = gimple_assign_rhs1 (stmt);
if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
|| gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
op = TREE_OPERAND (op, 0);
tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
if (!leader)
return NULL_TREE;
tree res;
stmts = NULL;
if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
res = gimple_build (&stmts, BIT_FIELD_REF,
TREE_TYPE (val), leader,
TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
res = gimple_build (&stmts, BIT_AND_EXPR,
TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
else
res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
TREE_TYPE (val), leader);
if (TREE_CODE (res) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (res)
|| gimple_bb (SSA_NAME_DEF_STMT (res)))
{
gimple_seq_discard (stmts);
/* During propagation we have to treat SSA info conservatively
and thus we can end up simplifying the inserted expression
at elimination time to something not defined in stmts. */
/* But then this is a redundancy we failed to detect. Which means
res now has two values. That doesn't play well with how
we track availability here, so give up. */
if (dump_file && (dump_flags & TDF_DETAILS))
{
if (TREE_CODE (res) == SSA_NAME)
res = eliminate_avail (res);
if (res)
{
fprintf (dump_file, "Failed to insert expression for value ");
print_generic_expr (dump_file, val);
fprintf (dump_file, " which is really fully redundant to ");
print_generic_expr (dump_file, res);
fprintf (dump_file, "\n");
}
}
return NULL_TREE;
}
else
{
gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
VN_INFO_GET (res)->valnum = val;
}
pre_stats.insertions++;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Inserted ");
print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
}
return res;
}
/* Perform elimination for the basic-block B during the domwalk. */
edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
/* Mark new bb. */
avail_stack.safe_push (NULL_TREE);
/* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
edge_iterator ei;
edge e;
FOR_EACH_EDGE (e, ei, b->preds)
if (e->flags & EDGE_EXECUTABLE)
break;
if (! e)
return NULL;
for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
{
gphi *phi = gsi.phi ();
tree res = PHI_RESULT (phi);
if (virtual_operand_p (res))
{
gsi_next (&gsi);
continue;
}
tree sprime = eliminate_avail (res);
if (sprime
&& sprime != res)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replaced redundant PHI node defining ");
print_generic_expr (dump_file, res);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime);
fprintf (dump_file, "\n");
}
/* If we inserted this PHI node ourselves, it's not an elimination. */
if (inserted_exprs
&& bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
pre_stats.phis--;
else
pre_stats.eliminations++;
/* If we will propagate into all uses don't bother to do
anything. */
if (may_propagate_copy (res, sprime))
{
/* Mark the PHI for removal. */
to_remove.safe_push (phi);
gsi_next (&gsi);
continue;
}
remove_phi_node (&gsi, false);
if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (res), sprime);
gimple *stmt = gimple_build_assign (res, sprime);
gimple_stmt_iterator gsi2 = gsi_after_labels (b);
gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
continue;
}
eliminate_push_avail (res);
gsi_next (&gsi);
}
for (gimple_stmt_iterator gsi = gsi_start_bb (b);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
tree sprime = NULL_TREE;
gimple *stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (lhs && TREE_CODE (lhs) == SSA_NAME
&& !gimple_has_volatile_ops (stmt)
/* See PR43491. Do not replace a global register variable when
it is the RHS of an assignment. Do replace local register
variables since gcc does not guarantee a local variable will
be allocated in a register.
??? The fix isn't effective here. This should instead
be ensured by not value-numbering them the same but treating
them like volatiles? */
&& !(gimple_assign_single_p (stmt)
&& (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
&& DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
&& is_global_var (gimple_assign_rhs1 (stmt)))))
{
sprime = eliminate_avail (lhs);
if (!sprime)
{
/* If there is no existing usable leader but SCCVN thinks
it has an expression it wants to use as replacement,
insert that. */
tree val = VN_INFO (lhs)->valnum;
if (val != VN_TOP
&& TREE_CODE (val) == SSA_NAME
&& VN_INFO (val)->needs_insertion
&& VN_INFO (val)->expr != NULL
&& (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
eliminate_push_avail (sprime);
}
/* If this now constitutes a copy duplicate points-to
and range info appropriately. This is especially
important for inserted code. See tree-ssa-copy.c
for similar code. */
if (sprime
&& TREE_CODE (sprime) == SSA_NAME)
{
basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
if (POINTER_TYPE_P (TREE_TYPE (lhs))
&& VN_INFO_PTR_INFO (lhs)
&& ! VN_INFO_PTR_INFO (sprime))
{
duplicate_ssa_name_ptr_info (sprime,
VN_INFO_PTR_INFO (lhs));
if (b != sprime_b)
mark_ptr_info_alignment_unknown
(SSA_NAME_PTR_INFO (sprime));
}
else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
&& VN_INFO_RANGE_INFO (lhs)
&& ! VN_INFO_RANGE_INFO (sprime)
&& b == sprime_b)
duplicate_ssa_name_range_info (sprime,
VN_INFO_RANGE_TYPE (lhs),
VN_INFO_RANGE_INFO (lhs));
}
/* Inhibit the use of an inserted PHI on a loop header when
the address of the memory reference is a simple induction
variable. In other cases the vectorizer won't do anything
anyway (either it's loop invariant or a complicated
expression). */
if (sprime
&& TREE_CODE (sprime) == SSA_NAME
&& do_pre
&& (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
&& loop_outer (b->loop_father)
&& has_zero_uses (sprime)
&& bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
&& gimple_assign_load_p (stmt))
{
gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
basic_block def_bb = gimple_bb (def_stmt);
if (gimple_code (def_stmt) == GIMPLE_PHI
&& def_bb->loop_father->header == def_bb)
{
loop_p loop = def_bb->loop_father;
ssa_op_iter iter;
tree op;
bool found = false;
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
{
affine_iv iv;
def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
if (def_bb
&& flow_bb_inside_loop_p (loop, def_bb)
&& simple_iv (loop, loop, op, &iv, true))
{
found = true;
break;
}
}
if (found)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Not replacing ");
print_gimple_expr (dump_file, stmt, 0);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime);
fprintf (dump_file, " which would add a loop"
" carried dependence to loop %d\n",
loop->num);
}
/* Don't keep sprime available. */
sprime = NULL_TREE;
}
}
}
if (sprime)
{
/* If we can propagate the value computed for LHS into
all uses don't bother doing anything with this stmt. */
if (may_propagate_copy (lhs, sprime))
{
/* Mark it for removal. */
to_remove.safe_push (stmt);
/* ??? Don't count copy/constant propagations. */
if (gimple_assign_single_p (stmt)
&& (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
|| gimple_assign_rhs1 (stmt) == sprime))
continue;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replaced ");
print_gimple_expr (dump_file, stmt, 0);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime);
fprintf (dump_file, " in all uses of ");
print_gimple_stmt (dump_file, stmt, 0);
}
pre_stats.eliminations++;
continue;
}
/* If this is an assignment from our leader (which
happens in the case the value-number is a constant)
then there is nothing to do. */
if (gimple_assign_single_p (stmt)
&& sprime == gimple_assign_rhs1 (stmt))
continue;
/* Else replace its RHS. */
bool can_make_abnormal_goto
= is_gimple_call (stmt)
&& stmt_can_make_abnormal_goto (stmt);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replaced ");
print_gimple_expr (dump_file, stmt, 0);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime);
fprintf (dump_file, " in ");
print_gimple_stmt (dump_file, stmt, 0);
}
pre_stats.eliminations++;
gimple *orig_stmt = stmt;
if (!useless_type_conversion_p (TREE_TYPE (lhs),
TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (lhs), sprime);
tree vdef = gimple_vdef (stmt);
tree vuse = gimple_vuse (stmt);
propagate_tree_value_into_stmt (&gsi, sprime);
stmt = gsi_stmt (gsi);
update_stmt (stmt);
if (vdef != gimple_vdef (stmt))
VN_INFO (vdef)->valnum = vuse;
/* If we removed EH side-effects from the statement, clean
its EH information. */
if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed EH side-effects.\n");
}
/* Likewise for AB side-effects. */
if (can_make_abnormal_goto
&& !stmt_can_make_abnormal_goto (stmt))
{
bitmap_set_bit (need_ab_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed AB side-effects.\n");
}
continue;
}
}
/* If the statement is a scalar store, see if the expression
has the same value number as its rhs. If so, the store is
dead. */
if (gimple_assign_single_p (stmt)
&& !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg (gimple_assign_lhs (stmt))
&& (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
|| is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
{
tree val;
tree rhs = gimple_assign_rhs1 (stmt);
vn_reference_t vnresult;
val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
&vnresult, false);
if (TREE_CODE (rhs) == SSA_NAME)
rhs = VN_INFO (rhs)->valnum;
if (val
&& operand_equal_p (val, rhs, 0))
{
/* We can only remove the later store if the former aliases
at least all accesses the later one does or if the store
was to readonly memory storing the same value. */
alias_set_type set = get_alias_set (lhs);
if (! vnresult
|| vnresult->set == set
|| alias_set_subset_of (set, vnresult->set))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Deleted redundant store ");
print_gimple_stmt (dump_file, stmt, 0);
}
/* Queue stmt for removal. */
to_remove.safe_push (stmt);
continue;
}
}
}
/* If this is a control statement for which value numbering left
some outgoing edges unexecutable, force the condition in a way
consistent with that. */
if (gcond *cond = dyn_cast <gcond *> (stmt))
{
if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Removing unexecutable edge from ");
print_gimple_stmt (dump_file, stmt, 0);
}
if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
== ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
gimple_cond_make_true (cond);
else
gimple_cond_make_false (cond);
update_stmt (cond);
el_todo |= TODO_cleanup_cfg;
continue;
}
}
bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
bool was_noreturn = (is_gimple_call (stmt)
&& gimple_call_noreturn_p (stmt));
tree vdef = gimple_vdef (stmt);
tree vuse = gimple_vuse (stmt);
/* If we didn't replace the whole stmt (or propagate the result
into all uses), replace all uses on this stmt with their
leaders. */
bool modified = false;
use_operand_p use_p;
ssa_op_iter iter;
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree use = USE_FROM_PTR (use_p);
/* ??? The call code above leaves stmt operands un-updated. */
if (TREE_CODE (use) != SSA_NAME)
continue;
tree sprime = eliminate_avail (use);
if (sprime && sprime != use
&& may_propagate_copy (use, sprime)
/* We substitute into debug stmts to avoid excessive
debug temporaries created by removed stmts, but we need
to avoid doing so for inserted sprimes as we never want
to create debug temporaries for them. */
&& (!inserted_exprs
|| TREE_CODE (sprime) != SSA_NAME
|| !is_gimple_debug (stmt)
|| !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
{
propagate_value (use_p, sprime);
modified = true;
}
}
/* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
into, which is a requirement for the IPA devirt machinery. */
gimple *old_stmt = stmt;
if (modified)
{
/* If a formerly non-invariant ADDR_EXPR is turned into an
invariant one it was on a separate stmt. */
if (gimple_assign_single_p (stmt)
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
gimple_stmt_iterator prev = gsi;
gsi_prev (&prev);
if (fold_stmt (&gsi))
{
/* fold_stmt may have created new stmts in between
the previous stmt and the folded stmt. Mark
all defs created there as varying to not confuse
the SCCVN machinery as we're using that even during
elimination. */
if (gsi_end_p (prev))
prev = gsi_start_bb (b);
else
gsi_next (&prev);
if (gsi_stmt (prev) != gsi_stmt (gsi))
do
{
tree def;
ssa_op_iter dit;
FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
dit, SSA_OP_ALL_DEFS)
/* As existing DEFs may move between stmts
we have to guard VN_INFO_GET. */
if (! has_VN_INFO (def))
VN_INFO_GET (def)->valnum = def;
if (gsi_stmt (prev) == gsi_stmt (gsi))
break;
gsi_next (&prev);
}
while (1);
}
stmt = gsi_stmt (gsi);
/* In case we folded the stmt away schedule the NOP for removal. */
if (gimple_nop_p (stmt))
to_remove.safe_push (stmt);
}
/* Visit indirect calls and turn them into direct calls if
possible using the devirtualization machinery. Do this before
checking for required EH/abnormal/noreturn cleanup as devirtualization
may expose more of those. */
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
tree fn = gimple_call_fn (call_stmt);
if (fn
&& flag_devirtualize
&& virtual_method_call_p (fn))
{
tree otr_type = obj_type_ref_class (fn);
unsigned HOST_WIDE_INT otr_tok
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
tree instance;
ipa_polymorphic_call_context context (current_function_decl,
fn, stmt, &instance);
context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
otr_type, stmt);
bool final;
vec <cgraph_node *> targets
= possible_polymorphic_call_targets (obj_type_ref_class (fn),
otr_tok, context, &final);
if (dump_file)
dump_possible_polymorphic_call_targets (dump_file,
obj_type_ref_class (fn),
otr_tok, context);
if (final && targets.length () <= 1 && dbg_cnt (devirt))
{
tree fn;
if (targets.length () == 1)
fn = targets[0]->decl;
else
fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
if (dump_enabled_p ())
{
location_t loc = gimple_location (stmt);
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
"converting indirect call to "
"function %s\n",
lang_hooks.decl_printable_name (fn, 2));
}
gimple_call_set_fndecl (call_stmt, fn);
/* If changing the call to __builtin_unreachable
or similar noreturn function, adjust gimple_call_fntype
too. */
if (gimple_call_noreturn_p (call_stmt)
&& VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
&& TYPE_ARG_TYPES (TREE_TYPE (fn))
&& (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
== void_type_node))
gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
maybe_remove_unused_call_args (cfun, call_stmt);
modified = true;
}
}
}
if (modified)
{
/* When changing a call into a noreturn call, cfg cleanup
is needed to fix up the noreturn call. */
if (!was_noreturn
&& is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
to_fixup.safe_push (stmt);
/* When changing a condition or switch into one we know what
edge will be executed, schedule a cfg cleanup. */
if ((gimple_code (stmt) == GIMPLE_COND
&& (gimple_cond_true_p (as_a <gcond *> (stmt))
|| gimple_cond_false_p (as_a <gcond *> (stmt))))
|| (gimple_code (stmt) == GIMPLE_SWITCH
&& TREE_CODE (gimple_switch_index
(as_a <gswitch *> (stmt))) == INTEGER_CST))
el_todo |= TODO_cleanup_cfg;
/* If we removed EH side-effects from the statement, clean
its EH information. */
if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed EH side-effects.\n");
}
/* Likewise for AB side-effects. */
if (can_make_abnormal_goto
&& !stmt_can_make_abnormal_goto (stmt))
{
bitmap_set_bit (need_ab_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed AB side-effects.\n");
}
update_stmt (stmt);
if (vdef != gimple_vdef (stmt))
VN_INFO (vdef)->valnum = vuse;
}
/* Make new values available - for fully redundant LHS we
continue with the next stmt above and skip this. */
def_operand_p defp;
FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
eliminate_push_avail (DEF_FROM_PTR (defp));
}
/* Replace destination PHI arguments. */
FOR_EACH_EDGE (e, ei, b->succs)
if (e->flags & EDGE_EXECUTABLE)
for (gphi_iterator gsi = gsi_start_phis (e->dest);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
gphi *phi = gsi.phi ();
use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
tree arg = USE_FROM_PTR (use_p);
if (TREE_CODE (arg) != SSA_NAME
|| virtual_operand_p (arg))
continue;
tree sprime = eliminate_avail (arg);
if (sprime && may_propagate_copy (arg, sprime))
propagate_value (use_p, sprime);
}
return NULL;
}
/* Make no longer available leaders no longer available. */
void
eliminate_dom_walker::after_dom_children (basic_block)
{
tree entry;
while ((entry = avail_stack.pop ()) != NULL_TREE)
{
tree valnum = VN_INFO (entry)->valnum;
tree old = avail[SSA_NAME_VERSION (valnum)];
if (old == entry)
avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
else
avail[SSA_NAME_VERSION (valnum)] = entry;
}
}
/* Eliminate fully redundant computations. */
static unsigned int
eliminate (bool do_pre)
{
eliminate_dom_walker el (CDI_DOMINATORS, do_pre);
el.avail.reserve (num_ssa_names);
el.walk (cfun->cfg->x_entry_block_ptr);
/* We cannot remove stmts during BB walk, especially not release SSA
names there as this confuses the VN machinery. The stmts ending
up in to_remove are either stores or simple copies.
Remove stmts in reverse order to make debug stmt creation possible. */
while (!el.to_remove.is_empty ())
{
gimple *stmt = el.to_remove.pop ();
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Removing dead stmt ");
print_gimple_stmt (dump_file, stmt, 0, 0);
}
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
if (gimple_code (stmt) == GIMPLE_PHI)
remove_phi_node (&gsi, true);
else
{
basic_block bb = gimple_bb (stmt);
unlink_stmt_vdef (stmt);
if (gsi_remove (&gsi, true))
bitmap_set_bit (el.need_eh_cleanup, bb->index);
if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
bitmap_set_bit (el.need_ab_cleanup, bb->index);
release_defs (stmt);
}
/* Removing a stmt may expose a forwarder block. */
el.el_todo |= TODO_cleanup_cfg;
}
/* Fixup stmts that became noreturn calls. This may require splitting
blocks and thus isn't possible during the dominator walk. Do this
in reverse order so we don't inadvertently remove a stmt we want to
fixup by visiting a dominating now noreturn call first. */
while (!el.to_fixup.is_empty ())
{
gimple *stmt = el.to_fixup.pop ();
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Fixing up noreturn call ");
print_gimple_stmt (dump_file, stmt, 0);
}
if (fixup_noreturn_call (stmt))
el.el_todo |= TODO_cleanup_cfg;
}
bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
if (do_eh_cleanup)
gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
if (do_ab_cleanup)
gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
if (do_eh_cleanup || do_ab_cleanup)
el.el_todo |= TODO_cleanup_cfg;
return el.el_todo;
}
/* Cheap DCE of a known set of possibly dead stmts.
Because we don't follow exactly the standard PRE algorithm, and decide not
@@ -5023,13 +4216,12 @@ pass_pre::execute (function *fun)
gcc_assert (!need_ssa_update_p (fun));
/* Remove all the redundant expressions. */
todo |= eliminate (true);
todo |= vn_eliminate (inserted_exprs);
statistics_counter_event (fun, "Insertions", pre_stats.insertions);
statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
statistics_counter_event (fun, "New PHIs", pre_stats.phis);
statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
clear_expression_ids ();
@@ -5069,61 +4261,3 @@ make_pass_pre (gcc::context *ctxt)
{
return new pass_pre (ctxt);
}
namespace {
const pass_data pass_data_fre =
{
GIMPLE_PASS, /* type */
"fre", /* name */
OPTGROUP_NONE, /* optinfo_flags */
TV_TREE_FRE, /* tv_id */
( PROP_cfg | PROP_ssa ), /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
0, /* todo_flags_finish */
};
class pass_fre : public gimple_opt_pass
{
public:
pass_fre (gcc::context *ctxt)
: gimple_opt_pass (pass_data_fre, ctxt)
{}
/* opt_pass methods: */
opt_pass * clone () { return new pass_fre (m_ctxt); }
virtual bool gate (function *) { return flag_tree_fre != 0; }
virtual unsigned int execute (function *);
}; // class pass_fre
unsigned int
pass_fre::execute (function *fun)
{
unsigned int todo = 0;
run_scc_vn (VN_WALKREWRITE);
memset (&pre_stats, 0, sizeof (pre_stats));
/* Remove all the redundant expressions. */
todo |= eliminate (false);
scc_vn_restore_ssa_info ();
free_scc_vn ();
statistics_counter_event (fun, "Insertions", pre_stats.insertions);
statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
return todo;
}
} // anon namespace
gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
return new pass_fre (ctxt);
}
@@ -55,13 +55,21 @@ along with GCC; see the file COPYING3. If not see
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
@@ -5149,3 +5157,868 @@ vn_nary_may_trap (vn_nary_op_t nary)
return false;
}
class eliminate_dom_walker : public dom_walker
{
public:
eliminate_dom_walker (cdi_direction, bitmap);
~eliminate_dom_walker ();
virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
tree eliminate_avail (tree op);
void eliminate_push_avail (tree op);
tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
bool do_pre;
unsigned int el_todo;
unsigned int eliminations;
unsigned int insertions;
/* SSA names that had their defs inserted by PRE if do_pre. */
bitmap inserted_exprs;
/* Blocks with statements that have had their EH properties changed. */
bitmap need_eh_cleanup;
/* Blocks with statements that have had their AB properties changed. */
bitmap need_ab_cleanup;
auto_vec<gimple *> to_remove;
auto_vec<gimple *> to_fixup;
auto_vec<tree> avail;
auto_vec<tree> avail_stack;
};
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
bitmap inserted_exprs_)
: dom_walker (direction), do_pre (inserted_exprs_ != NULL),
el_todo (0), eliminations (0), insertions (0),
inserted_exprs (inserted_exprs_)
{
need_eh_cleanup = BITMAP_ALLOC (NULL);
need_ab_cleanup = BITMAP_ALLOC (NULL);
}
eliminate_dom_walker::~eliminate_dom_walker ()
{
BITMAP_FREE (need_eh_cleanup);
BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
eliminate domwalk. */
tree
eliminate_dom_walker::eliminate_avail (tree op)
{
tree valnum = VN_INFO (op)->valnum;
if (TREE_CODE (valnum) == SSA_NAME)
{
if (SSA_NAME_IS_DEFAULT_DEF (valnum))
return valnum;
if (avail.length () > SSA_NAME_VERSION (valnum))
return avail[SSA_NAME_VERSION (valnum)];
}
else if (is_gimple_min_invariant (valnum))
return valnum;
return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available. */
void
eliminate_dom_walker::eliminate_push_avail (tree op)
{
tree valnum = VN_INFO (op)->valnum;
if (TREE_CODE (valnum) == SSA_NAME)
{
if (avail.length () <= SSA_NAME_VERSION (valnum))
avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
tree pushop = op;
if (avail[SSA_NAME_VERSION (valnum)])
pushop = avail[SSA_NAME_VERSION (valnum)];
avail_stack.safe_push (pushop);
avail[SSA_NAME_VERSION (valnum)] = op;
}
}
/* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
the leader for the expression if insertion was successful. */
tree
eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
/* We can insert a sequence with a single assignment only. */
gimple_seq stmts = VN_INFO (val)->expr;
if (!gimple_seq_singleton_p (stmts))
return NULL_TREE;
gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
if (!stmt
|| (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
&& gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
&& gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
&& (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
|| TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
return NULL_TREE;
tree op = gimple_assign_rhs1 (stmt);
if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
|| gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
op = TREE_OPERAND (op, 0);
tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
if (!leader)
return NULL_TREE;
tree res;
stmts = NULL;
if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
res = gimple_build (&stmts, BIT_FIELD_REF,
TREE_TYPE (val), leader,
TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
res = gimple_build (&stmts, BIT_AND_EXPR,
TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
else
res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
TREE_TYPE (val), leader);
if (TREE_CODE (res) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (res)
|| gimple_bb (SSA_NAME_DEF_STMT (res)))
{
gimple_seq_discard (stmts);
/* During propagation we have to treat SSA info conservatively
and thus we can end up simplifying the inserted expression
at elimination time to something not defined in stmts. */
/* But then this is a redundancy we failed to detect. Which means
res now has two values. That doesn't play well with how
we track availability here, so give up. */
if (dump_file && (dump_flags & TDF_DETAILS))
{
if (TREE_CODE (res) == SSA_NAME)
res = eliminate_avail (res);
if (res)
{
fprintf (dump_file, "Failed to insert expression for value ");
print_generic_expr (dump_file, val);
fprintf (dump_file, " which is really fully redundant to ");
print_generic_expr (dump_file, res);
fprintf (dump_file, "\n");
}
}
return NULL_TREE;
}
else
{
gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
VN_INFO_GET (res)->valnum = val;
}
insertions++;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Inserted ");
print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
}
return res;
}
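For orientation only, here are source-level analogues of the single-statement RHS kinds eliminate_insert above is willing to materialize: conversions, VIEW_CONVERT_EXPR/BIT_FIELD_REF extracts, and BIT_AND with a constant mask. These samples are illustrative and not part of the patch; whether SCCVN actually records such an expression for a value (and sets needs_insertion) depends on value numbering, not on the source shape alone.

/* Illustration only -- compiles standalone, not part of the patch.  */
static unsigned char
narrow (unsigned int x)
{
  return (unsigned char) x;   /* a NOP/CONVERT_EXPR on GIMPLE */
}

static unsigned int
low_byte (unsigned int x)
{
  return x & 0xffu;           /* BIT_AND_EXPR with an INTEGER_CST mask */
}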
/* Perform elimination for the basic-block B during the domwalk. */
edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
/* Mark new bb. */
avail_stack.safe_push (NULL_TREE);
/* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
edge_iterator ei;
edge e;
FOR_EACH_EDGE (e, ei, b->preds)
if (e->flags & EDGE_EXECUTABLE)
break;
if (! e)
return NULL;
for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
{
gphi *phi = gsi.phi ();
tree res = PHI_RESULT (phi);
if (virtual_operand_p (res))
{
gsi_next (&gsi);
continue;
}
tree sprime = eliminate_avail (res);
if (sprime
&& sprime != res)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replaced redundant PHI node defining ");
print_generic_expr (dump_file, res);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime);
fprintf (dump_file, "\n");
}
/* If we inserted this PHI node ourselves, it's not an elimination. */
if (! inserted_exprs
|| ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
eliminations++;
/* If we will propagate into all uses don't bother to do
anything. */
if (may_propagate_copy (res, sprime))
{
/* Mark the PHI for removal. */
to_remove.safe_push (phi);
gsi_next (&gsi);
continue;
}
remove_phi_node (&gsi, false);
if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (res), sprime);
gimple *stmt = gimple_build_assign (res, sprime);
gimple_stmt_iterator gsi2 = gsi_after_labels (b);
gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
continue;
}
eliminate_push_avail (res);
gsi_next (&gsi);
}
for (gimple_stmt_iterator gsi = gsi_start_bb (b);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
tree sprime = NULL_TREE;
gimple *stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (lhs && TREE_CODE (lhs) == SSA_NAME
&& !gimple_has_volatile_ops (stmt)
/* See PR43491. Do not replace a global register variable when
it is the RHS of an assignment. Do replace local register
variables since gcc does not guarantee a local variable will
be allocated in a register.
??? The fix isn't effective here. This should instead
be ensured by not value-numbering them the same but treating
them like volatiles? */
&& !(gimple_assign_single_p (stmt)
&& (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
&& DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
&& is_global_var (gimple_assign_rhs1 (stmt)))))
{
sprime = eliminate_avail (lhs);
if (!sprime)
{
/* If there is no existing usable leader but SCCVN thinks
it has an expression it wants to use as replacement,
insert that. */
tree val = VN_INFO (lhs)->valnum;
if (val != VN_TOP
&& TREE_CODE (val) == SSA_NAME
&& VN_INFO (val)->needs_insertion
&& VN_INFO (val)->expr != NULL
&& (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
eliminate_push_avail (sprime);
}
/* If this now constitutes a copy duplicate points-to
and range info appropriately. This is especially
important for inserted code. See tree-ssa-copy.c
for similar code. */
if (sprime
&& TREE_CODE (sprime) == SSA_NAME)
{
basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
if (POINTER_TYPE_P (TREE_TYPE (lhs))
&& VN_INFO_PTR_INFO (lhs)
&& ! VN_INFO_PTR_INFO (sprime))
{
duplicate_ssa_name_ptr_info (sprime,
VN_INFO_PTR_INFO (lhs));
if (b != sprime_b)
mark_ptr_info_alignment_unknown
(SSA_NAME_PTR_INFO (sprime));
}
else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
&& VN_INFO_RANGE_INFO (lhs)
&& ! VN_INFO_RANGE_INFO (sprime)
&& b == sprime_b)
duplicate_ssa_name_range_info (sprime,
VN_INFO_RANGE_TYPE (lhs),
VN_INFO_RANGE_INFO (lhs));
}
/* Inhibit the use of an inserted PHI on a loop header when
the address of the memory reference is a simple induction
variable. In other cases the vectorizer won't do anything
anyway (either it's loop invariant or a complicated
expression). */
if (sprime
&& TREE_CODE (sprime) == SSA_NAME
&& do_pre
&& (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
&& loop_outer (b->loop_father)
&& has_zero_uses (sprime)
&& bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
&& gimple_assign_load_p (stmt))
{
gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
basic_block def_bb = gimple_bb (def_stmt);
if (gimple_code (def_stmt) == GIMPLE_PHI
&& def_bb->loop_father->header == def_bb)
{
loop_p loop = def_bb->loop_father;
ssa_op_iter iter;
tree op;
bool found = false;
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
{
affine_iv iv;
def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
if (def_bb
&& flow_bb_inside_loop_p (loop, def_bb)
&& simple_iv (loop, loop, op, &iv, true))
{
found = true;
break;
}
}
if (found)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Not replacing ");
print_gimple_expr (dump_file, stmt, 0);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime);
fprintf (dump_file, " which would add a loop"
" carried dependence to loop %d\n",
loop->num);
}
/* Don't keep sprime available. */
sprime = NULL_TREE;
}
}
}
if (sprime)
{
/* If we can propagate the value computed for LHS into
all uses don't bother doing anything with this stmt. */
if (may_propagate_copy (lhs, sprime))
{
/* Mark it for removal. */
to_remove.safe_push (stmt);
/* ??? Don't count copy/constant propagations. */
if (gimple_assign_single_p (stmt)
&& (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
|| gimple_assign_rhs1 (stmt) == sprime))
continue;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replaced ");
print_gimple_expr (dump_file, stmt, 0);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime);
fprintf (dump_file, " in all uses of ");
print_gimple_stmt (dump_file, stmt, 0);
}
eliminations++;
continue;
}
/* If this is an assignment from our leader (which
happens in the case the value-number is a constant)
then there is nothing to do. */
if (gimple_assign_single_p (stmt)
&& sprime == gimple_assign_rhs1 (stmt))
continue;
/* Else replace its RHS. */
bool can_make_abnormal_goto
= is_gimple_call (stmt)
&& stmt_can_make_abnormal_goto (stmt);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replaced ");
print_gimple_expr (dump_file, stmt, 0);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime);
fprintf (dump_file, " in ");
print_gimple_stmt (dump_file, stmt, 0);
}
eliminations++;
gimple *orig_stmt = stmt;
if (!useless_type_conversion_p (TREE_TYPE (lhs),
TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (lhs), sprime);
tree vdef = gimple_vdef (stmt);
tree vuse = gimple_vuse (stmt);
propagate_tree_value_into_stmt (&gsi, sprime);
stmt = gsi_stmt (gsi);
update_stmt (stmt);
if (vdef != gimple_vdef (stmt))
VN_INFO (vdef)->valnum = vuse;
/* If we removed EH side-effects from the statement, clean
its EH information. */
if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed EH side-effects.\n");
}
/* Likewise for AB side-effects. */
if (can_make_abnormal_goto
&& !stmt_can_make_abnormal_goto (stmt))
{
bitmap_set_bit (need_ab_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed AB side-effects.\n");
}
continue;
}
}
/* If the statement is a scalar store, see if the expression
has the same value number as its rhs. If so, the store is
dead. */
if (gimple_assign_single_p (stmt)
&& !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg (gimple_assign_lhs (stmt))
&& (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
|| is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
{
tree val;
tree rhs = gimple_assign_rhs1 (stmt);
vn_reference_t vnresult;
val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
&vnresult, false);
if (TREE_CODE (rhs) == SSA_NAME)
rhs = VN_INFO (rhs)->valnum;
if (val
&& operand_equal_p (val, rhs, 0))
{
/* We can only remove the later store if the former aliases
at least all accesses the later one does or if the store
was to readonly memory storing the same value. */
alias_set_type set = get_alias_set (lhs);
if (! vnresult
|| vnresult->set == set
|| alias_set_subset_of (set, vnresult->set))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Deleted redundant store ");
print_gimple_stmt (dump_file, stmt, 0);
}
/* Queue stmt for removal. */
to_remove.safe_push (stmt);
continue;
}
}
}
/* If this is a control statement for which value numbering left
some outgoing edges unexecutable, force the condition in a way
consistent with that. */
if (gcond *cond = dyn_cast <gcond *> (stmt))
{
if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Removing unexecutable edge from ");
print_gimple_stmt (dump_file, stmt, 0);
}
if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
== ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
gimple_cond_make_true (cond);
else
gimple_cond_make_false (cond);
update_stmt (cond);
el_todo |= TODO_cleanup_cfg;
continue;
}
}
bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
bool was_noreturn = (is_gimple_call (stmt)
&& gimple_call_noreturn_p (stmt));
tree vdef = gimple_vdef (stmt);
tree vuse = gimple_vuse (stmt);
/* If we didn't replace the whole stmt (or propagate the result
into all uses), replace all uses on this stmt with their
leaders. */
bool modified = false;
use_operand_p use_p;
ssa_op_iter iter;
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree use = USE_FROM_PTR (use_p);
/* ??? The call code above leaves stmt operands un-updated. */
if (TREE_CODE (use) != SSA_NAME)
continue;
tree sprime = eliminate_avail (use);
if (sprime && sprime != use
&& may_propagate_copy (use, sprime)
/* We substitute into debug stmts to avoid excessive
debug temporaries created by removed stmts, but we need
to avoid doing so for inserted sprimes as we never want
to create debug temporaries for them. */
&& (!inserted_exprs
|| TREE_CODE (sprime) != SSA_NAME
|| !is_gimple_debug (stmt)
|| !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
{
propagate_value (use_p, sprime);
modified = true;
}
}
/* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
into, which is a requirement for the IPA devirt machinery. */
gimple *old_stmt = stmt;
if (modified)
{
/* If a formerly non-invariant ADDR_EXPR is turned into an
invariant one it was on a separate stmt. */
if (gimple_assign_single_p (stmt)
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
gimple_stmt_iterator prev = gsi;
gsi_prev (&prev);
if (fold_stmt (&gsi))
{
/* fold_stmt may have created new stmts in between
the previous stmt and the folded stmt. Mark
all defs created there as varying to not confuse
the SCCVN machinery as we're using that even during
elimination. */
if (gsi_end_p (prev))
prev = gsi_start_bb (b);
else
gsi_next (&prev);
if (gsi_stmt (prev) != gsi_stmt (gsi))
do
{
tree def;
ssa_op_iter dit;
FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
dit, SSA_OP_ALL_DEFS)
/* As existing DEFs may move between stmts
we have to guard VN_INFO_GET. */
if (! has_VN_INFO (def))
VN_INFO_GET (def)->valnum = def;
if (gsi_stmt (prev) == gsi_stmt (gsi))
break;
gsi_next (&prev);
}
while (1);
}
stmt = gsi_stmt (gsi);
/* In case we folded the stmt away schedule the NOP for removal. */
if (gimple_nop_p (stmt))
to_remove.safe_push (stmt);
}
/* Visit indirect calls and turn them into direct calls if
possible using the devirtualization machinery. Do this before
checking for required EH/abnormal/noreturn cleanup as devirtualization
may expose more of those. */
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
tree fn = gimple_call_fn (call_stmt);
if (fn
&& flag_devirtualize
&& virtual_method_call_p (fn))
{
tree otr_type = obj_type_ref_class (fn);
unsigned HOST_WIDE_INT otr_tok
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
tree instance;
ipa_polymorphic_call_context context (current_function_decl,
fn, stmt, &instance);
context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
otr_type, stmt);
bool final;
vec <cgraph_node *> targets
= possible_polymorphic_call_targets (obj_type_ref_class (fn),
otr_tok, context, &final);
if (dump_file)
dump_possible_polymorphic_call_targets (dump_file,
obj_type_ref_class (fn),
otr_tok, context);
if (final && targets.length () <= 1 && dbg_cnt (devirt))
{
tree fn;
if (targets.length () == 1)
fn = targets[0]->decl;
else
fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
if (dump_enabled_p ())
{
location_t loc = gimple_location (stmt);
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
"converting indirect call to "
"function %s\n",
lang_hooks.decl_printable_name (fn, 2));
}
gimple_call_set_fndecl (call_stmt, fn);
/* If changing the call to __builtin_unreachable
or similar noreturn function, adjust gimple_call_fntype
too. */
if (gimple_call_noreturn_p (call_stmt)
&& VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
&& TYPE_ARG_TYPES (TREE_TYPE (fn))
&& (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
== void_type_node))
gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
maybe_remove_unused_call_args (cfun, call_stmt);
modified = true;
}
}
}
if (modified)
{
/* When changing a call into a noreturn call, cfg cleanup
is needed to fix up the noreturn call. */
if (!was_noreturn
&& is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
to_fixup.safe_push (stmt);
/* When changing a condition or switch into one we know what
edge will be executed, schedule a cfg cleanup. */
if ((gimple_code (stmt) == GIMPLE_COND
&& (gimple_cond_true_p (as_a <gcond *> (stmt))
|| gimple_cond_false_p (as_a <gcond *> (stmt))))
|| (gimple_code (stmt) == GIMPLE_SWITCH
&& TREE_CODE (gimple_switch_index
(as_a <gswitch *> (stmt))) == INTEGER_CST))
el_todo |= TODO_cleanup_cfg;
/* If we removed EH side-effects from the statement, clean
its EH information. */
if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed EH side-effects.\n");
}
/* Likewise for AB side-effects. */
if (can_make_abnormal_goto
&& !stmt_can_make_abnormal_goto (stmt))
{
bitmap_set_bit (need_ab_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed AB side-effects.\n");
}
update_stmt (stmt);
if (vdef != gimple_vdef (stmt))
VN_INFO (vdef)->valnum = vuse;
}
/* Make new values available - for fully redundant LHS we
continue with the next stmt above and skip this. */
def_operand_p defp;
FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
eliminate_push_avail (DEF_FROM_PTR (defp));
}
/* Replace destination PHI arguments. */
FOR_EACH_EDGE (e, ei, b->succs)
if (e->flags & EDGE_EXECUTABLE)
for (gphi_iterator gsi = gsi_start_phis (e->dest);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
gphi *phi = gsi.phi ();
use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
tree arg = USE_FROM_PTR (use_p);
if (TREE_CODE (arg) != SSA_NAME
|| virtual_operand_p (arg))
continue;
tree sprime = eliminate_avail (arg);
if (sprime && may_propagate_copy (arg, sprime))
propagate_value (use_p, sprime);
}
return NULL;
}
/* Make no longer available leaders no longer available. */
void
eliminate_dom_walker::after_dom_children (basic_block)
{
tree entry;
while ((entry = avail_stack.pop ()) != NULL_TREE)
{
tree valnum = VN_INFO (entry)->valnum;
tree old = avail[SSA_NAME_VERSION (valnum)];
if (old == entry)
avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
else
avail[SSA_NAME_VERSION (valnum)] = entry;
}
}
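The avail/avail_stack pair above implements dominator-scoped leaders: entering a block pushes a NULL_TREE marker, recording a leader saves the one it shadows, and after_dom_children pops back to the marker, restoring the dominating leaders. Below is a simplified standalone sketch of that scheme (illustration only, not GCC code; it stores explicit (value-id, previous-leader) records instead of recovering the value id from VN_INFO as the real code does):

// Simplified scoped-availability sketch -- not GCC code.  Value ids and
// leaders are plain nonzero ints; 0 means "no leader" / the block marker.
#include <cstdio>
#include <utility>
#include <vector>

static std::vector<int> avail;                        // value id -> current leader
static std::vector<std::pair<unsigned, int> > undo;   // saved records; (0, 0) marks a block

static void enter_block () { undo.push_back (std::make_pair (0u, 0)); }

static void push_avail (unsigned value_id, int leader)
{
  if (avail.size () <= value_id)
    avail.resize (value_id + 1, 0);
  undo.push_back (std::make_pair (value_id, avail[value_id]));  // remember shadowed leader
  avail[value_id] = leader;
}

static void leave_block ()
{
  while (!undo.empty ())
    {
      std::pair<unsigned, int> rec = undo.back ();
      undo.pop_back ();
      if (rec.first == 0)                 // hit the block marker
        break;
      avail[rec.first] = rec.second;      // restore dominating leader (or none)
    }
}

int main ()
{
  enter_block ();                         // dominating block: value 3 has leader 7
  push_avail (3, 7);
  enter_block ();                         // dominated block shadows it with leader 9
  push_avail (3, 9);
  std::printf ("inner leader %d\n", avail[3]);   // prints 9
  leave_block ();
  std::printf ("outer leader %d\n", avail[3]);   // prints 7 again
  leave_block ();
}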
/* Eliminate fully redundant computations. */
unsigned int
vn_eliminate (bitmap inserted_exprs)
{
eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
el.avail.reserve (num_ssa_names);
el.walk (cfun->cfg->x_entry_block_ptr);
/* We cannot remove stmts during BB walk, especially not release SSA
names there as this confuses the VN machinery. The stmts ending
up in to_remove are either stores or simple copies.
Remove stmts in reverse order to make debug stmt creation possible. */
while (!el.to_remove.is_empty ())
{
gimple *stmt = el.to_remove.pop ();
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Removing dead stmt ");
print_gimple_stmt (dump_file, stmt, 0, 0);
}
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
if (gimple_code (stmt) == GIMPLE_PHI)
remove_phi_node (&gsi, true);
else
{
basic_block bb = gimple_bb (stmt);
unlink_stmt_vdef (stmt);
if (gsi_remove (&gsi, true))
bitmap_set_bit (el.need_eh_cleanup, bb->index);
if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
bitmap_set_bit (el.need_ab_cleanup, bb->index);
release_defs (stmt);
}
/* Removing a stmt may expose a forwarder block. */
el.el_todo |= TODO_cleanup_cfg;
}
/* Fixup stmts that became noreturn calls. This may require splitting
blocks and thus isn't possible during the dominator walk. Do this
in reverse order so we don't inadvertently remove a stmt we want to
fixup by visiting a dominating now noreturn call first. */
while (!el.to_fixup.is_empty ())
{
gimple *stmt = el.to_fixup.pop ();
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Fixing up noreturn call ");
print_gimple_stmt (dump_file, stmt, 0);
}
if (fixup_noreturn_call (stmt))
el.el_todo |= TODO_cleanup_cfg;
}
bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
if (do_eh_cleanup)
gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
if (do_ab_cleanup)
gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
if (do_eh_cleanup || do_ab_cleanup)
el.el_todo |= TODO_cleanup_cfg;
statistics_counter_event (cfun, "Eliminated", el.eliminations);
statistics_counter_event (cfun, "Insertions", el.insertions);
return el.el_todo;
}
namespace {
const pass_data pass_data_fre =
{
GIMPLE_PASS, /* type */
"fre", /* name */
OPTGROUP_NONE, /* optinfo_flags */
TV_TREE_FRE, /* tv_id */
( PROP_cfg | PROP_ssa ), /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
0, /* todo_flags_finish */
};
class pass_fre : public gimple_opt_pass
{
public:
pass_fre (gcc::context *ctxt)
: gimple_opt_pass (pass_data_fre, ctxt)
{}
/* opt_pass methods: */
opt_pass * clone () { return new pass_fre (m_ctxt); }
virtual bool gate (function *) { return flag_tree_fre != 0; }
virtual unsigned int execute (function *);
}; // class pass_fre
unsigned int
pass_fre::execute (function *)
{
unsigned int todo = 0;
run_scc_vn (VN_WALKREWRITE);
/* Remove all the redundant expressions. */
todo |= vn_eliminate (NULL);
scc_vn_restore_ssa_info ();
free_scc_vn ();
return todo;
}
} // anon namespace
gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
return new pass_fre (ctxt);
}
@@ -214,6 +214,7 @@ extern vn_ssa_aux_t VN_INFO (tree);
extern vn_ssa_aux_t VN_INFO_GET (tree);
tree vn_get_expr_for (tree);
void run_scc_vn (vn_lookup_kind);
unsigned int vn_eliminate (bitmap);
void free_scc_vn (void);
void scc_vn_restore_ssa_info (void);
tree vn_nary_op_lookup (tree, vn_nary_op_t *);