Commit 8cb65b37 by Marc Glisse Committed by Marc Glisse

re PR tree-optimization/62112 (Optimize out malloc when block is unused or write-only)

2014-08-21  Marc Glisse  <marc.glisse@inria.fr>

	PR tree-optimization/62112
gcc/
	* gimple-iterator.c (gsi_replace): Return whether EH cleanup is needed.
	* gimple-iterator.h (gsi_replace): Return bool.
	* tree-ssa-alias.c (ref_may_alias_global_p_1): New helper, code
	moved from ref_may_alias_global_p.
	(ref_may_alias_global_p, refs_may_alias_p, ref_maybe_used_by_stmt_p):
	New overloads.
	(ref_maybe_used_by_call_p): Take ao_ref* instead of tree.
	(stmt_kills_ref_p_1): Rename...
	(stmt_kills_ref_p): ... to this.
	* tree-ssa-alias.h (ref_may_alias_global_p, ref_maybe_used_by_stmt_p,
	stmt_kills_ref_p): Declare.
	* tree-ssa-dse.c (dse_possible_dead_store_p): New argument, use it.
	Move the self-assignment case...
	(dse_optimize_stmt): ... here. Handle builtin calls. Remove dead code.
gcc/testsuite/
	* gcc.dg/tree-ssa/pr62112-1.c: New file.
	* gcc.dg/tree-ssa/pr62112-2.c: Likewise.
	* gcc.c-torture/execute/pr35472.c: Add noclone attribute.
	* gcc.c-torture/execute/20071219-1.c: Likewise.

From-SVN: r214262
parent 31879c2c
2014-08-21 Marc Glisse <marc.glisse@inria.fr>
PR tree-optimization/62112
* gimple-iterator.c (gsi_replace): Return whether EH cleanup is needed.
* gimple-iterator.h (gsi_replace): Return bool.
* tree-ssa-alias.c (ref_may_alias_global_p_1): New helper, code
moved from ref_may_alias_global_p.
(ref_may_alias_global_p, refs_may_alias_p, ref_maybe_used_by_stmt_p):
New overloads.
(ref_maybe_used_by_call_p): Take ao_ref* instead of tree.
(stmt_kills_ref_p_1): Rename...
(stmt_kills_ref_p): ... to this.
* tree-ssa-alias.h (ref_may_alias_global_p, ref_maybe_used_by_stmt_p,
stmt_kills_ref_p): Declare.
* tree-ssa-dse.c (dse_possible_dead_store_p): New argument, use it.
Move the self-assignment case...
(dse_optimize_stmt): ... here. Handle builtin calls. Remove dead code.
2014-08-21  David Malcolm  <dmalcolm@redhat.com>
	* rtl.h (try_split): Strengthen return type from rtx to rtx_insn *.
......
...@@ -429,15 +429,17 @@ gsi_split_seq_before (gimple_stmt_iterator *i, gimple_seq *pnew_seq) ...@@ -429,15 +429,17 @@ gsi_split_seq_before (gimple_stmt_iterator *i, gimple_seq *pnew_seq)
/* Replace the statement pointed-to by GSI to STMT. If UPDATE_EH_INFO /* Replace the statement pointed-to by GSI to STMT. If UPDATE_EH_INFO
is true, the exception handling information of the original is true, the exception handling information of the original
statement is moved to the new statement. Assignments must only be statement is moved to the new statement. Assignments must only be
replaced with assignments to the same LHS. */ replaced with assignments to the same LHS. Returns whether EH edge
cleanup is required. */
void bool
gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info) gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info)
{ {
gimple orig_stmt = gsi_stmt (*gsi); gimple orig_stmt = gsi_stmt (*gsi);
bool require_eh_edge_purge = false;
if (stmt == orig_stmt) if (stmt == orig_stmt)
return; return false;
gcc_assert (!gimple_has_lhs (orig_stmt) || !gimple_has_lhs (stmt) gcc_assert (!gimple_has_lhs (orig_stmt) || !gimple_has_lhs (stmt)
|| gimple_get_lhs (orig_stmt) == gimple_get_lhs (stmt)); || gimple_get_lhs (orig_stmt) == gimple_get_lhs (stmt));
...@@ -448,7 +450,7 @@ gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info) ...@@ -448,7 +450,7 @@ gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info)
/* Preserve EH region information from the original statement, if /* Preserve EH region information from the original statement, if
requested by the caller. */ requested by the caller. */
if (update_eh_info) if (update_eh_info)
maybe_clean_or_replace_eh_stmt (orig_stmt, stmt); require_eh_edge_purge = maybe_clean_or_replace_eh_stmt (orig_stmt, stmt);
gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt); gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
...@@ -460,6 +462,7 @@ gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info) ...@@ -460,6 +462,7 @@ gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info)
gsi_set_stmt (gsi, stmt); gsi_set_stmt (gsi, stmt);
gimple_set_modified (stmt, true); gimple_set_modified (stmt, true);
update_modified_stmt (stmt); update_modified_stmt (stmt);
return require_eh_edge_purge;
} }
......
...@@ -58,7 +58,7 @@ extern void gsi_insert_seq_after (gimple_stmt_iterator *, gimple_seq, ...@@ -58,7 +58,7 @@ extern void gsi_insert_seq_after (gimple_stmt_iterator *, gimple_seq,
extern gimple_seq gsi_split_seq_after (gimple_stmt_iterator); extern gimple_seq gsi_split_seq_after (gimple_stmt_iterator);
extern void gsi_set_stmt (gimple_stmt_iterator *, gimple); extern void gsi_set_stmt (gimple_stmt_iterator *, gimple);
extern void gsi_split_seq_before (gimple_stmt_iterator *, gimple_seq *); extern void gsi_split_seq_before (gimple_stmt_iterator *, gimple_seq *);
extern void gsi_replace (gimple_stmt_iterator *, gimple, bool); extern bool gsi_replace (gimple_stmt_iterator *, gimple, bool);
extern void gsi_replace_with_seq (gimple_stmt_iterator *, gimple_seq, bool); extern void gsi_replace_with_seq (gimple_stmt_iterator *, gimple_seq, bool);
extern void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple, extern void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple,
enum gsi_iterator_update); enum gsi_iterator_update);
......
2014-08-21 Marc Glisse <marc.glisse@inria.fr>
PR tree-optimization/62112
* gcc.dg/tree-ssa/pr62112-1.c: New file.
* gcc.dg/tree-ssa/pr62112-2.c: Likewise.
* gcc.c-torture/execute/pr35472.c: Add noclone attribute.
* gcc.c-torture/execute/20071219-1.c: Likewise.
2014-08-20  Bill Schmidt  <wschmidt@linux.vnet.ibm.com>
	* testsuite/gcc.target/powerpc/builtins-1.c: New test.
......
...@@ -10,7 +10,7 @@ struct S ...@@ -10,7 +10,7 @@ struct S
struct S *p; struct S *p;
void __attribute__((noinline)) void __attribute__((noinline,noclone))
foo (struct S *x, int set) foo (struct S *x, int set)
{ {
int i; int i;
...@@ -22,7 +22,7 @@ foo (struct S *x, int set) ...@@ -22,7 +22,7 @@ foo (struct S *x, int set)
p = x; p = x;
} }
void __attribute__((noinline)) void __attribute__((noinline,noclone))
test1 (void) test1 (void)
{ {
struct S a; struct S a;
...@@ -35,7 +35,7 @@ test1 (void) ...@@ -35,7 +35,7 @@ test1 (void)
foo (&b, 0); foo (&b, 0);
} }
void __attribute__((noinline)) void __attribute__((noinline,noclone))
test2 (void) test2 (void)
{ {
struct S a; struct S a;
...@@ -48,7 +48,7 @@ test2 (void) ...@@ -48,7 +48,7 @@ test2 (void)
foo (&b, 0); foo (&b, 0);
} }
void __attribute__((noinline)) void __attribute__((noinline,noclone))
test3 (void) test3 (void)
{ {
struct S a; struct S a;
......
...@@ -2,7 +2,7 @@ extern void abort (void); ...@@ -2,7 +2,7 @@ extern void abort (void);
extern void *memset (void *s, int c, __SIZE_TYPE__ n); extern void *memset (void *s, int c, __SIZE_TYPE__ n);
struct S { int i[16]; }; struct S { int i[16]; };
struct S *p; struct S *p;
void __attribute__((noinline)) void __attribute__((noinline,noclone))
foo(struct S *a, struct S *b) { a->i[0] = -1; p = b; } foo(struct S *a, struct S *b) { a->i[0] = -1; p = b; }
void test (void) void test (void)
{ {
......
/* { dg-do compile } */
/* { dg-options "-O1 -fdump-tree-dse1-details" } */
/* Block is malloc'd, written twice, and never read, freed, or escaped:
   all three calls are removable dead stores (counted by the
   scan-tree-dump-times "Deleted dead call" 4 check below, together with
   the other functions in this file).  */
void f(){
char*p=__builtin_malloc(42);
__builtin_memset(p,3,10);
__builtin_memset(p,7,33);
}
char*g;
/* The pointer escapes through the global g (memset returns its first
   argument), but the block is freed before anything can read it, so the
   memset is still a dead store.  NOTE(review): this relies on the free
   making the escape harmless — confirm against the dse1 dump.  */
void h(){
char*p=__builtin_malloc(42);
g=__builtin_memset(p,3,10);
__builtin_free(p);
}
/* p escapes via the return value, so the second memset must stay;
   however the first memset (10 bytes) is completely overwritten by the
   second (33 bytes) and is therefore a dead store.  */
char*i(){
char*p=__builtin_malloc(42);
__builtin_memset(p,3,10);
__builtin_memset(p,7,33);
return p;
}
/* { dg-final { scan-tree-dump-times "Deleted dead call" 4 "dse1" } } */
/* { dg-final { cleanup-tree-dump "dse1" } } */
/* { dg-do compile } */
/* { dg-options "-O1 -fdump-tree-dse1-details" } */
char*g;
/* Negative test: the second memset covers only 10 of the 33 bytes
   written by the first, so the first store is NOT fully killed, and p
   escapes via the return value — nothing here may be deleted (checked
   by the scan-tree-dump-not "Deleted dead" directive below).  */
char* f(){
char*p=__builtin_malloc(42);
__builtin_memset(p,3,33);
__builtin_memset(p,7,10);
return p;
}
/* Negative test: the block escapes through the global g (memset returns
   its first argument) and is never freed, so a later read is possible
   and the memset must not be removed.  */
void h(){
char*p=__builtin_malloc(42);
g=__builtin_memset(p,3,10);
}
/* { dg-final { scan-tree-dump-not "Deleted dead" "dse1" } } */
/* { dg-final { cleanup-tree-dump "dse1" } } */
...@@ -330,12 +330,11 @@ ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref) ...@@ -330,12 +330,11 @@ ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
return true; return true;
} }
/* Return true whether REF may refer to global memory. */ /* Returns whether reference REF to BASE may refer to global memory. */
bool static bool
ref_may_alias_global_p (tree ref) ref_may_alias_global_p_1 (tree base)
{ {
tree base = get_base_address (ref);
if (DECL_P (base)) if (DECL_P (base))
return is_global_var (base); return is_global_var (base);
else if (TREE_CODE (base) == MEM_REF else if (TREE_CODE (base) == MEM_REF
...@@ -344,6 +343,20 @@ ref_may_alias_global_p (tree ref) ...@@ -344,6 +343,20 @@ ref_may_alias_global_p (tree ref)
return true; return true;
} }
bool
ref_may_alias_global_p (ao_ref *ref)
{
tree base = ao_ref_base (ref);
return ref_may_alias_global_p_1 (base);
}
bool
ref_may_alias_global_p (tree ref)
{
tree base = get_base_address (ref);
return ref_may_alias_global_p_1 (base);
}
/* Return true whether STMT may clobber global memory. */ /* Return true whether STMT may clobber global memory. */
bool bool
...@@ -1413,6 +1426,14 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p) ...@@ -1413,6 +1426,14 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
#endif #endif
} }
static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
ao_ref r1;
ao_ref_init (&r1, ref1);
return refs_may_alias_p_1 (&r1, ref2, true);
}
bool bool
refs_may_alias_p (tree ref1, tree ref2) refs_may_alias_p (tree ref1, tree ref2)
{ {
...@@ -1769,12 +1790,10 @@ process_args: ...@@ -1769,12 +1790,10 @@ process_args:
} }
static bool static bool
ref_maybe_used_by_call_p (gimple call, tree ref) ref_maybe_used_by_call_p (gimple call, ao_ref *ref)
{ {
ao_ref r;
bool res; bool res;
ao_ref_init (&r, ref); res = ref_maybe_used_by_call_p_1 (call, ref);
res = ref_maybe_used_by_call_p_1 (call, &r);
if (res) if (res)
++alias_stats.ref_maybe_used_by_call_p_may_alias; ++alias_stats.ref_maybe_used_by_call_p_may_alias;
else else
...@@ -1787,7 +1806,7 @@ ref_maybe_used_by_call_p (gimple call, tree ref) ...@@ -1787,7 +1806,7 @@ ref_maybe_used_by_call_p (gimple call, tree ref)
true, otherwise return false. */ true, otherwise return false. */
bool bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref) ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
{ {
if (is_gimple_assign (stmt)) if (is_gimple_assign (stmt))
{ {
...@@ -1810,14 +1829,13 @@ ref_maybe_used_by_stmt_p (gimple stmt, tree ref) ...@@ -1810,14 +1829,13 @@ ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
else if (gimple_code (stmt) == GIMPLE_RETURN) else if (gimple_code (stmt) == GIMPLE_RETURN)
{ {
tree retval = gimple_return_retval (stmt); tree retval = gimple_return_retval (stmt);
tree base;
if (retval if (retval
&& TREE_CODE (retval) != SSA_NAME && TREE_CODE (retval) != SSA_NAME
&& !is_gimple_min_invariant (retval) && !is_gimple_min_invariant (retval)
&& refs_may_alias_p (retval, ref)) && refs_may_alias_p (retval, ref))
return true; return true;
/* If ref escapes the function then the return acts as a use. */ /* If ref escapes the function then the return acts as a use. */
base = get_base_address (ref); tree base = ao_ref_base (ref);
if (!base) if (!base)
; ;
else if (DECL_P (base)) else if (DECL_P (base))
...@@ -1831,6 +1849,14 @@ ref_maybe_used_by_stmt_p (gimple stmt, tree ref) ...@@ -1831,6 +1849,14 @@ ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
return true; return true;
} }
bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
ao_ref r;
ao_ref_init (&r, ref);
return ref_maybe_used_by_stmt_p (stmt, &r);
}
/* If the call in statement CALL may clobber the memory reference REF /* If the call in statement CALL may clobber the memory reference REF
return true, otherwise return false. */ return true, otherwise return false. */
...@@ -2169,8 +2195,8 @@ stmt_may_clobber_ref_p (gimple stmt, tree ref) ...@@ -2169,8 +2195,8 @@ stmt_may_clobber_ref_p (gimple stmt, tree ref)
/* If STMT kills the memory reference REF return true, otherwise /* If STMT kills the memory reference REF return true, otherwise
return false. */ return false. */
static bool bool
stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref) stmt_kills_ref_p (gimple stmt, ao_ref *ref)
{ {
if (!ao_ref_base (ref)) if (!ao_ref_base (ref))
return false; return false;
...@@ -2357,7 +2383,7 @@ stmt_kills_ref_p (gimple stmt, tree ref) ...@@ -2357,7 +2383,7 @@ stmt_kills_ref_p (gimple stmt, tree ref)
{ {
ao_ref r; ao_ref r;
ao_ref_init (&r, ref); ao_ref_init (&r, ref);
return stmt_kills_ref_p_1 (stmt, &r); return stmt_kills_ref_p (stmt, &r);
} }
......
...@@ -101,17 +101,20 @@ extern alias_set_type ao_ref_alias_set (ao_ref *); ...@@ -101,17 +101,20 @@ extern alias_set_type ao_ref_alias_set (ao_ref *);
extern bool ptr_deref_may_alias_global_p (tree); extern bool ptr_deref_may_alias_global_p (tree);
extern bool ptr_derefs_may_alias_p (tree, tree); extern bool ptr_derefs_may_alias_p (tree, tree);
extern bool ref_may_alias_global_p (tree); extern bool ref_may_alias_global_p (tree);
extern bool ref_may_alias_global_p (ao_ref *);
extern bool refs_may_alias_p (tree, tree); extern bool refs_may_alias_p (tree, tree);
extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool); extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool);
extern bool refs_anti_dependent_p (tree, tree); extern bool refs_anti_dependent_p (tree, tree);
extern bool refs_output_dependent_p (tree, tree); extern bool refs_output_dependent_p (tree, tree);
extern bool ref_maybe_used_by_stmt_p (gimple, tree); extern bool ref_maybe_used_by_stmt_p (gimple, tree);
extern bool ref_maybe_used_by_stmt_p (gimple, ao_ref *);
extern bool stmt_may_clobber_global_p (gimple); extern bool stmt_may_clobber_global_p (gimple);
extern bool stmt_may_clobber_ref_p (gimple, tree); extern bool stmt_may_clobber_ref_p (gimple, tree);
extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *); extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *);
extern bool call_may_clobber_ref_p (gimple, tree); extern bool call_may_clobber_ref_p (gimple, tree);
extern bool call_may_clobber_ref_p_1 (gimple, ao_ref *); extern bool call_may_clobber_ref_p_1 (gimple, ao_ref *);
extern bool stmt_kills_ref_p (gimple, tree); extern bool stmt_kills_ref_p (gimple, tree);
extern bool stmt_kills_ref_p (gimple, ao_ref *);
extern tree get_continuation_for_phi (gimple, ao_ref *, extern tree get_continuation_for_phi (gimple, ao_ref *,
unsigned int *, bitmap *, bool, unsigned int *, bitmap *, bool,
void *(*)(ao_ref *, tree, void *, bool), void *(*)(ao_ref *, tree, void *, bool),
......
...@@ -82,25 +82,18 @@ static bitmap need_eh_cleanup; ...@@ -82,25 +82,18 @@ static bitmap need_eh_cleanup;
/* A helper of dse_optimize_stmt. /* A helper of dse_optimize_stmt.
Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that Given a GIMPLE_ASSIGN in STMT that writes to REF, find a candidate
may prove STMT to be dead. statement *USE_STMT that may prove STMT to be dead.
Return TRUE if the above conditions are met, otherwise FALSE. */ Return TRUE if the above conditions are met, otherwise FALSE. */
static bool static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt) dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt)
{ {
gimple temp; gimple temp;
unsigned cnt = 0; unsigned cnt = 0;
*use_stmt = NULL; *use_stmt = NULL;
/* Self-assignments are zombies. */
if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0))
{
*use_stmt = stmt;
return true;
}
/* Find the first dominated statement that clobbers (part of) the /* Find the first dominated statement that clobbers (part of) the
memory stmt stores to with no intermediate statement that may use memory stmt stores to with no intermediate statement that may use
part of the memory stmt stores. That is, find a store that may part of the memory stmt stores. That is, find a store that may
...@@ -164,8 +157,7 @@ dse_possible_dead_store_p (gimple stmt, gimple *use_stmt) ...@@ -164,8 +157,7 @@ dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
temp = use_stmt; temp = use_stmt;
} }
/* If the statement is a use the store is not dead. */ /* If the statement is a use the store is not dead. */
else if (ref_maybe_used_by_stmt_p (use_stmt, else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
gimple_assign_lhs (stmt)))
{ {
fail = true; fail = true;
BREAK_FROM_IMM_USE_STMT (ui); BREAK_FROM_IMM_USE_STMT (ui);
...@@ -191,7 +183,7 @@ dse_possible_dead_store_p (gimple stmt, gimple *use_stmt) ...@@ -191,7 +183,7 @@ dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
just pretend the stmt makes itself dead. Otherwise fail. */ just pretend the stmt makes itself dead. Otherwise fail. */
if (!temp) if (!temp)
{ {
if (stmt_may_clobber_global_p (stmt)) if (ref_may_alias_global_p (ref))
return false; return false;
temp = stmt; temp = stmt;
...@@ -199,7 +191,7 @@ dse_possible_dead_store_p (gimple stmt, gimple *use_stmt) ...@@ -199,7 +191,7 @@ dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
} }
} }
/* Continue walking until we reach a kill. */ /* Continue walking until we reach a kill. */
while (!stmt_kills_ref_p (temp, gimple_assign_lhs (stmt))); while (!stmt_kills_ref_p (temp, ref));
*use_stmt = temp; *use_stmt = temp;
...@@ -228,23 +220,78 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi) ...@@ -228,23 +220,78 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi)
if (!gimple_vdef (stmt)) if (!gimple_vdef (stmt))
return; return;
/* We know we have virtual definitions. If this is a GIMPLE_ASSIGN
that's not also a function call, then record it into our table. */
if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
return;
/* Don't return early on *this_2(D) ={v} {CLOBBER}. */ /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
if (gimple_has_volatile_ops (stmt) if (gimple_has_volatile_ops (stmt)
&& (!gimple_clobber_p (stmt) && (!gimple_clobber_p (stmt)
|| TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF)) || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
return; return;
/* We know we have virtual definitions. We can handle assignments and
some builtin calls. */
if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
{
switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
{
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMSET:
{
gimple use_stmt;
ao_ref ref;
tree size = NULL_TREE;
if (gimple_call_num_args (stmt) == 3)
size = gimple_call_arg (stmt, 2);
tree ptr = gimple_call_arg (stmt, 0);
ao_ref_init_from_ptr_and_size (&ref, ptr, size);
if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt))
return;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " Deleted dead call '");
print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
fprintf (dump_file, "'\n");
}
tree lhs = gimple_call_lhs (stmt);
if (lhs)
{
gimple new_stmt = gimple_build_assign (lhs, ptr);
unlink_stmt_vdef (stmt);
if (gsi_replace (gsi, new_stmt, true))
bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
}
else
{
/* Then we need to fix the operand of the consuming stmt. */
unlink_stmt_vdef (stmt);
/* Remove the dead store. */
if (gsi_remove (gsi, true))
bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
}
break;
}
default:
return;
}
}
if (is_gimple_assign (stmt)) if (is_gimple_assign (stmt))
{ {
gimple use_stmt; gimple use_stmt;
if (!dse_possible_dead_store_p (stmt, &use_stmt)) /* Self-assignments are zombies. */
if (operand_equal_p (gimple_assign_rhs1 (stmt),
gimple_assign_lhs (stmt), 0))
use_stmt = stmt;
else
{
ao_ref ref;
ao_ref_init (&ref, gimple_assign_lhs (stmt));
if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt))
return; return;
}
/* Now we know that use_stmt kills the LHS of stmt. */ /* Now we know that use_stmt kills the LHS of stmt. */
...@@ -254,23 +301,6 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi) ...@@ -254,23 +301,6 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi)
&& !gimple_clobber_p (use_stmt)) && !gimple_clobber_p (use_stmt))
return; return;
basic_block bb;
/* If use_stmt is or might be a nop assignment, e.g. for
struct { ... } S a, b, *p; ...
b = a; b = b;
or
b = a; b = *p; where p might be &b,
or
*p = a; *p = b; where p might be &b,
or
*p = *u; *p = *v; where p might be v, then USE_STMT
acts as a use as well as definition, so store in STMT
is not dead. */
if (stmt != use_stmt
&& ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
return;
if (dump_file && (dump_flags & TDF_DETAILS)) if (dump_file && (dump_flags & TDF_DETAILS))
{ {
fprintf (dump_file, " Deleted dead store '"); fprintf (dump_file, " Deleted dead store '");
...@@ -282,7 +312,7 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi) ...@@ -282,7 +312,7 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi)
unlink_stmt_vdef (stmt); unlink_stmt_vdef (stmt);
/* Remove the dead store. */ /* Remove the dead store. */
bb = gimple_bb (stmt); basic_block bb = gimple_bb (stmt);
if (gsi_remove (gsi, true)) if (gsi_remove (gsi, true))
bitmap_set_bit (need_eh_cleanup, bb->index); bitmap_set_bit (need_eh_cleanup, bb->index);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment