Commit 1e080ab4 by Richard Biener (committed by Richard Biener)

tree-object-size.c: Remove builtins.h include, include tree-cfg.h.

2015-10-26  Richard Biener  <rguenther@suse.de>

	* tree-object-size.c: Remove builtins.h include, include tree-cfg.h.
	(do_valueize): New function.
	(pass_object_sizes::execute): Use gimple_fold_stmt_to_constant and
	replace_uses_by.
	* tree-ssa-threadedge.c: Remove builtins.h include, include
gimple-fold.h.
	(fold_assignment_stmt): Remove.
	(threadedge_valueize): New function.
	(record_temporary_equivalences_from_stmts): Use
	gimple_fold_stmt_to_constant_1, note additional cleanup
	opportunities.

From-SVN: r229364
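Reviewer note: both files below stop open-coding their own folding and instead call the generic gimple folder, which takes a "valueize" callback that maps each SSA operand to a currently-known value or returns it unchanged. The following standalone C sketch models only that callback pattern; the `struct expr` type, `fold_plus`, and friends are invented for illustration and are not GCC's tree/gimple API.

```c
#include <stdio.h>

/* Toy stand-in for a GIMPLE operand: either a constant with a value,
   or a symbolic "name" identified by name_id.  Invented types, not
   GCC's representation.  */
struct expr
{
  int is_const;
  int value;
  int name_id;
};

/* The hook contract mirrored from gimple_fold_stmt_to_constant_1:
   map an operand to something more concrete, or return it unchanged.  */
typedef struct expr (*valueize_fn) (struct expr);

/* Identity hook, playing the role of do_valueize in
   tree-object-size.c: the folder sees operands exactly as written.  */
static struct expr
identity_valueize (struct expr e)
{
  return e;
}

/* Fold a toy addition: valueize both operands first, then constant-
   fold only if both became constants.  Returns 0 (like the real
   folder returning NULL_TREE) when no constant results.  */
static int
fold_plus (struct expr a, struct expr b, valueize_fn valueize, int *out)
{
  a = valueize (a);
  b = valueize (b);
  if (a.is_const && b.is_const)
    {
      *out = a.value + b.value;
      return 1;
    }
  return 0;
}

int
main (void)
{
  struct expr four = { 1, 4, 0 };
  struct expr name = { 0, 0, 1 };   /* a yet-unknown SSA-like name */
  int result;

  if (fold_plus (four, four, identity_valueize, &result))
    printf ("4 + 4 folded to %d\n", result);
  if (!fold_plus (four, name, identity_valueize, &result))
    printf ("4 + _1 did not fold with the identity hook\n");
  return 0;
}
```

With the identity hook the folder sees operands exactly as written, which is all tree-object-size.c needs; jump threading plugs in a smarter hook (see threadedge_valueize below).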
parent 3373589b
--- a/gcc/tree-object-size.c
+++ b/gcc/tree-object-size.c
@@ -36,7 +36,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "gimple-iterator.h"
 #include "tree-pass.h"
 #include "tree-ssa-propagate.h"
-#include "builtins.h"
+#include "tree-cfg.h"
 
 struct object_size_info
 {
@@ -1231,6 +1231,14 @@ public:
 }; // class pass_object_sizes
 
+/* Dummy valueize function.  */
+
+static tree
+do_valueize (tree t)
+{
+  return t;
+}
+
 unsigned int
 pass_object_sizes::execute (function *fun)
 {
@@ -1287,7 +1295,11 @@ pass_object_sizes::execute (function *fun)
 	      continue;
 	    }
 
-	  result = fold_call_stmt (as_a <gcall *> (call), false);
+	  tree lhs = gimple_call_lhs (call);
+	  if (!lhs)
+	    continue;
+
+	  result = gimple_fold_stmt_to_constant (call, do_valueize);
 	  if (!result)
 	    {
 	      tree ost = gimple_call_arg (call, 1);
@@ -1318,22 +1330,8 @@ pass_object_sizes::execute (function *fun)
 	      fprintf (dump_file, "\n");
 	    }
 
-	  tree lhs = gimple_call_lhs (call);
-	  if (!lhs)
-	    continue;
-
 	  /* Propagate into all uses and fold those stmts.  */
-	  gimple *use_stmt;
-	  imm_use_iterator iter;
-	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
-	    {
-	      use_operand_p use_p;
-	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
-		SET_USE (use_p, result);
-	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
-	      fold_stmt (&gsi);
-	      update_stmt (gsi_stmt (gsi));
-	    }
+	  replace_uses_by (lhs, result);
 	}
     }
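The deleted loop above manually walked every immediate use of LHS, substituted RESULT, and re-folded each user statement; replace_uses_by packages exactly that. Below is a minimal standalone model of the idea, with an invented use-record array standing in for GCC's immediate-use lists (none of these names are GCC API).

```c
#include <stdio.h>

#define NUM_USES 3

/* Invented miniature of an immediate-use list: each record says which
   "statement" uses the SSA name and what its operand currently is.
   This models only the shape of replace_uses_by.  */
struct use_site
{
  int stmt_id;
  int operand_is_const;
  int operand_value;
};

static struct use_site uses[NUM_USES] =
  { { 10, 0, 0 }, { 11, 0, 0 }, { 12, 0, 0 } };

/* Stand-in for fold_stmt/update_stmt: a user statement gets another
   folding opportunity once its operand became a constant.  */
static void
refold_stmt (int stmt_id)
{
  printf ("re-folding stmt %d after substitution\n", stmt_id);
}

/* Model of replace_uses_by (lhs, result): rewrite every recorded use
   of the name to the constant RESULT, then revisit each user.  */
static void
replace_uses_by_model (int result)
{
  for (int i = 0; i < NUM_USES; i++)
    {
      uses[i].operand_is_const = 1;
      uses[i].operand_value = result;
      refold_stmt (uses[i].stmt_id);
    }
}

int
main (void)
{
  replace_uses_by_model (64);   /* e.g. a computed object size */
  return 0;
}
```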
--- a/gcc/tree-ssa-threadedge.c
+++ b/gcc/tree-ssa-threadedge.c
@@ -36,7 +36,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-ssa-threadedge.h"
 #include "tree-ssa-threadbackward.h"
 #include "tree-ssa-dom.h"
-#include "builtins.h"
+#include "gimple-fold.h"
 
 /* To avoid code explosion due to jump threading, we limit the
    number of statements we are going to copy.  This variable
@@ -180,54 +180,18 @@ record_temporary_equivalences_from_phis (edge e, const_and_copies *const_and_cop
   return true;
 }
 
-/* Fold the RHS of an assignment statement and return it as a tree.
-   May return NULL_TREE if no simplification is possible.  */
+/* Valueize hook for gimple_fold_stmt_to_constant_1.  */
 
 static tree
-fold_assignment_stmt (gimple *stmt)
+threadedge_valueize (tree t)
 {
-  enum tree_code subcode = gimple_assign_rhs_code (stmt);
-
-  switch (get_gimple_rhs_class (subcode))
+  if (TREE_CODE (t) == SSA_NAME)
     {
-    case GIMPLE_SINGLE_RHS:
-      return fold (gimple_assign_rhs1 (stmt));
-
-    case GIMPLE_UNARY_RHS:
-      {
-	tree lhs = gimple_assign_lhs (stmt);
-	tree op0 = gimple_assign_rhs1 (stmt);
-	return fold_unary (subcode, TREE_TYPE (lhs), op0);
-      }
-
-    case GIMPLE_BINARY_RHS:
-      {
-	tree lhs = gimple_assign_lhs (stmt);
-	tree op0 = gimple_assign_rhs1 (stmt);
-	tree op1 = gimple_assign_rhs2 (stmt);
-	return fold_binary (subcode, TREE_TYPE (lhs), op0, op1);
-      }
-
-    case GIMPLE_TERNARY_RHS:
-      {
-	tree lhs = gimple_assign_lhs (stmt);
-	tree op0 = gimple_assign_rhs1 (stmt);
-	tree op1 = gimple_assign_rhs2 (stmt);
-	tree op2 = gimple_assign_rhs3 (stmt);
-
-	/* Sadly, we have to handle conditional assignments specially
-	   here, because fold expects all the operands of an expression
-	   to be folded before the expression itself is folded, but we
-	   can't just substitute the folded condition here.  */
-	if (gimple_assign_rhs_code (stmt) == COND_EXPR)
-	  op0 = fold (op0);
-
-	return fold_ternary (subcode, TREE_TYPE (lhs), op0, op1, op2);
-      }
-
-    default:
-      gcc_unreachable ();
+      tree tem = SSA_NAME_VALUE (t);
+      if (tem)
+	return tem;
     }
+  return t;
 }
 
 /* Try to simplify each statement in E->dest, ultimately leading to
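Unlike do_valueize in tree-object-size.c, this hook consults SSA_NAME_VALUE, the temporary equivalences recorded while threading, before falling back to the name itself. The standalone toy below models just that lookup; negative ints play the role of SSA names and the table is invented for illustration.

```c
#include <stdio.h>

/* Toy encoding: negative ints are SSA-name ids, non-negative ints are
   constants.  The table stands in for SSA_NAME_VALUE; 0 means no
   value was recorded.  Invented model, not GCC's representation.  */
static int ssa_name_value[4] = { 0, 7, 0, 42 };

/* Model of threadedge_valueize: if T is a "name" and an equivalence
   was recorded for it, return that; otherwise return T unchanged.  */
static int
threadedge_valueize_model (int t)
{
  if (t < 0)                       /* like TREE_CODE (t) == SSA_NAME */
    {
      int tem = ssa_name_value[-t];
      if (tem)
	return tem;                /* like returning SSA_NAME_VALUE (t) */
    }
  return t;
}

int
main (void)
{
  printf ("_1 -> %d\n", threadedge_valueize_model (-1));  /* 7 */
  printf ("_2 -> %d\n", threadedge_valueize_model (-2));  /* unchanged */
  printf ("_3 -> %d\n", threadedge_valueize_model (-3));  /* 42 */
  return 0;
}
```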
@@ -371,48 +335,50 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
       else
 	{
 	  /* A statement that is not a trivial copy or ASSERT_EXPR.
-	     We're going to temporarily copy propagate the operands
-	     and see if that allows us to simplify this statement.  */
-	  tree *copy;
-	  ssa_op_iter iter;
-	  use_operand_p use_p;
-	  unsigned int num, i = 0;
-
-	  num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
-	  copy = XCNEWVEC (tree, num);
-
-	  /* Make a copy of the uses & vuses into USES_COPY, then cprop into
-	     the operands.  */
-	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
-	    {
-	      tree tmp = NULL;
-	      tree use = USE_FROM_PTR (use_p);
-
-	      copy[i++] = use;
-	      if (TREE_CODE (use) == SSA_NAME)
-		tmp = SSA_NAME_VALUE (use);
-	      if (tmp)
-		SET_USE (use_p, tmp);
-	    }
-
-	  /* Try to fold/lookup the new expression.  Inserting the
+	     Try to fold the new expression.  Inserting the
 	     expression into the hash table is unlikely to help.  */
-	  if (is_gimple_call (stmt))
-	    cached_lhs = fold_call_stmt (as_a <gcall *> (stmt), false);
-	  else
-	    cached_lhs = fold_assignment_stmt (stmt);
+	  /* ???  The DOM callback below can be changed to setting
+	     the mprts_hook around the call to thread_across_edge,
+	     avoiding the use substitution.  The VRP hook should be
+	     changed to properly valueize operands itself using
+	     SSA_NAME_VALUE in addition to its own lattice.  */
+	  cached_lhs = gimple_fold_stmt_to_constant_1 (stmt,
+						       threadedge_valueize);
 	  if (!cached_lhs
 	      || (TREE_CODE (cached_lhs) != SSA_NAME
 		  && !is_gimple_min_invariant (cached_lhs)))
-	    cached_lhs = (*simplify) (stmt, stmt, avail_exprs_stack);
-
-	  /* Restore the statement's original uses/defs.  */
-	  i = 0;
-	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
-	    SET_USE (use_p, copy[i++]);
-
-	  free (copy);
+	    {
+	      /* We're going to temporarily copy propagate the operands
+		 and see if that allows us to simplify this statement.  */
+	      tree *copy;
+	      ssa_op_iter iter;
+	      use_operand_p use_p;
+	      unsigned int num, i = 0;
+
+	      num = NUM_SSA_OPERANDS (stmt, SSA_OP_ALL_USES);
+	      copy = XALLOCAVEC (tree, num);
+
+	      /* Make a copy of the uses & vuses into USES_COPY, then cprop into
+		 the operands.  */
+	      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
+		{
+		  tree tmp = NULL;
+		  tree use = USE_FROM_PTR (use_p);
+
+		  copy[i++] = use;
+		  if (TREE_CODE (use) == SSA_NAME)
+		    tmp = SSA_NAME_VALUE (use);
+		  if (tmp)
+		    SET_USE (use_p, tmp);
+		}
+
+	      cached_lhs = (*simplify) (stmt, stmt, avail_exprs_stack);
+
+	      /* Restore the statement's original uses/defs.  */
+	      i = 0;
+	      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
+		SET_USE (use_p, copy[i++]);
+	    }
 	}
 
       /* Record the context sensitive equivalence if we were able
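When the valueizing fold does not produce something useful, the pass still falls back to temporarily copy-propagating recorded values into the statement's operands, invoking the pass-specific (*simplify) callback, and restoring the originals; note also the switch from XCNEWVEC/free to the stack-allocated XALLOCAVEC, which makes the old free (copy) call unnecessary. The standalone sketch below models only that save/substitute/restore discipline; all names are invented.

```c
#include <stdio.h>
#include <string.h>

#define NUM_OPS 3

/* Invented statement: negative ints are symbolic operands, and the
   lattice table plays SSA_NAME_VALUE (0 = unknown).  This models the
   discipline around the (*simplify) callback, not GCC's iterators.  */
static int operands[NUM_OPS] = { -1, -2, -3 };
static int lattice[NUM_OPS + 1] = { 0, 9, 0, 4 };

/* Stand-in for the pass-specific (*simplify) hook: succeed only if
   every operand became a constant after substitution.  */
static int
try_simplify (void)
{
  for (int i = 0; i < NUM_OPS; i++)
    if (operands[i] < 0)
      return 0;
  return 1;
}

int
main (void)
{
  int copy[NUM_OPS];

  /* Save the original uses, then copy-propagate temporary values in,
     as the FOR_EACH_SSA_USE_OPERAND loop does.  Like XALLOCAVEC, the
     copy lives on the stack, so there is nothing to free.  */
  memcpy (copy, operands, sizeof copy);
  for (int i = 0; i < NUM_OPS; i++)
    if (operands[i] < 0 && lattice[-operands[i]])
      operands[i] = lattice[-operands[i]];

  printf ("simplify %s\n", try_simplify () ? "succeeded" : "failed");

  /* Restore the statement's original uses whether or not
     simplification succeeded.  */
  memcpy (operands, copy, sizeof copy);
  return 0;
}
```

The restore step is what makes the substitution safe: the equivalences only hold along the edge being threaded, so the statement must be returned to its original form before any other path is considered.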