Commit a8da523f by Jan Hubicka Committed by Jan Hubicka

re PR tree-optimization/28850 (missed call -> jmp transformation; redundant…

re PR tree-optimization/28850 (missed call -> jmp transformation; redundant unwind stuff with empty finally)


	PR middle-end/28850
	* tree-pass.h (pass_cleanup_eh): New function.
	(remove_unreachable_regions): Break code handling RTL
	to rtl_remove_unreachable_regions; remove ERT_MUST_NOT_THROW
	that can not be reached by runtime.
	(can_be_reached_by_runtime): New function.
	(label_to_region_map): New function.
	(num_eh_regions): New function.
	(rtl_remove_unreachable_regions): New function.
	(convert_from_eh_region_ranges): Call rtl_remove_unreachable_regions.
	(remove_eh_region): New function.
	* except.h: Include sbitmap and vecprim.
	(remove_eh_region, remove_unreachable_regions, label_to_region_map,
	num_eh_regions): Declare.
	* passes.c (init_optimization_passes): Schedule cleanup_eh.
	* Makefile.in (EXCEPT_H): New; replace all uses of except.h
	by it.
	* tree-eh.c (tree_remove_unreachable_handlers): New function.
	(tree_empty_eh_handler_p): New function.
	(cleanup_empty_eh): New function.
	(cleanup_eh): New function.
	(pass_cleanup_eh): New function.

From-SVN: r145233
parent 98f358e5
2009-03-29 Jan Hubicka <jh@suse.cz> 2009-03-29 Jan Hubicka <jh@suse.cz>
PR middle-end/28850
* tree-pass.h (pass_cleanup_eh): New function.
(remove_unreachable_regions): Break code handling RTL
to rtl_remove_unreachable_regions; remove ERT_MUST_NOT_THROW
that can not be reached by runtime.
(can_be_reached_by_runtime): New function.
(label_to_region_map): New function.
(num_eh_regions): New function.
(rtl_remove_unreachable_regions): New function.
(convert_from_eh_region_ranges): Call rtl_remove_unreachable_regions.
(remove_eh_region): New function.
* except.h: Include sbitmap and vecprim.
(remove_eh_region, remove_unreachable_regions, label_to_region_map,
num_eh_regions): Declare.
* passes.c (init_optimization_passes): Schedule cleanup_eh.
* Makefile.in (EXCEPT_H): New; replace all uses of except.h
by it.
* tree-eh.c (tree_remove_unreachable_handlers): New function.
(tree_empty_eh_handler_p): New function.
(cleanup_empty_eh): New function.
(cleanup_eh): New function.
(pass_cleanup_eh): New function.
2009-03-29 Jan Hubicka <jh@suse.cz>
* except.c (verify_eh_tree): Fix handling of fun!=cfun; be ready * except.c (verify_eh_tree): Fix handling of fun!=cfun; be ready
for removed regions. for removed regions.
......
...@@ -227,8 +227,6 @@ static hashval_t t2r_hash (const void *); ...@@ -227,8 +227,6 @@ static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree); static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree); static tree lookup_type_for_runtime (tree);
static void remove_unreachable_regions (rtx);
static int ttypes_filter_eq (const void *, const void *); static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *); static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *); static int ehspec_filter_eq (const void *, const void *);
...@@ -622,69 +620,145 @@ collect_eh_region_array (void) ...@@ -622,69 +620,145 @@ collect_eh_region_array (void)
} }
} }
/* Remove all regions whose labels are not reachable from insns. */ /* R is MUST_NOT_THROW region that is not reachable via local
RESX instructions. It still must be kept in the tree in case runtime
can unwind through it, or we would eliminate the terminate call the
runtime would otherwise make. Return TRUE if R contains throwing statements
or some of the exceptions in inner regions can be unwound up to R.
CONTAINS_STMT is bitmap of all regions that contains some throwing
statements.
Function looks O(^3) at first sight. In fact the function is called at most
once for every MUST_NOT_THROW in EH tree from remove_unreachable_regions
Because the outer loop walking subregions does not dive in MUST_NOT_THROW,
the outer loop examines every region at most once. The inner loop
is doing unwinding from the throwing statement same way as we do during
CFG construction, so it is O(^2) in size of EH tree, but O(n) in size
of CFG. In practice EH trees are wide, not deep, so this is not
a problem. */
static void static bool
remove_unreachable_regions (rtx insns) can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
{ {
int i, *uid_region_num; struct eh_region *i = r->inner;
bool *reachable; unsigned n;
struct eh_region *r; bitmap_iterator bi;
rtx insn;
uid_region_num = XCNEWVEC (int, get_max_uid ()); if (TEST_BIT (contains_stmt, r->region_number))
reachable = XCNEWVEC (bool, cfun->eh->last_region_number + 1); return true;
if (r->aka)
for (i = cfun->eh->last_region_number; i > 0; --i) EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
if (TEST_BIT (contains_stmt, n))
return true;
if (!i)
return false;
while (1)
{ {
r = VEC_index (eh_region, cfun->eh->region_array, i); /* It is pointless to look into MUST_NOT_THROW
if (!r || r->region_number != i) or dive into subregions. They never unwind up. */
continue; if (i->type != ERT_MUST_NOT_THROW)
if (r->resume)
{ {
gcc_assert (!uid_region_num[INSN_UID (r->resume)]); bool found = TEST_BIT (contains_stmt, i->region_number);
uid_region_num[INSN_UID (r->resume)] = i; if (!found)
EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
if (TEST_BIT (contains_stmt, n))
{
found = true;
break;
}
/* We have nested region that contains throwing statement.
See if resuming might lead up to the resx or we get locally
caught sooner. If we get locally caught sooner, we either
know region R is not reachable or it would have direct edge
from the EH resx and thus consider region reachable at
first place. */
if (found)
{
struct eh_region *i1 = i;
tree type_thrown = NULL_TREE;
if (i1->type == ERT_THROW)
{
type_thrown = i1->u.eh_throw.type;
i1 = i1->outer;
}
for (; i1 != r; i1 = i1->outer)
if (reachable_next_level (i1, type_thrown, NULL,
false) >= RNL_CAUGHT)
break;
if (i1 == r)
return true;
}
} }
if (r->label) /* If there are sub-regions, process them. */
if (i->type != ERT_MUST_NOT_THROW && i->inner)
i = i->inner;
/* If there are peers, process them. */
else if (i->next_peer)
i = i->next_peer;
/* Otherwise, step back up the tree to the next peer. */
else
{ {
gcc_assert (!uid_region_num[INSN_UID (r->label)]); do
uid_region_num[INSN_UID (r->label)] = i; {
i = i->outer;
if (i == r)
return false;
}
while (i->next_peer == NULL);
i = i->next_peer;
} }
} }
}
for (insn = insns; insn; insn = NEXT_INSN (insn)) /* Remove all regions whose labels are not reachable.
reachable[uid_region_num[INSN_UID (insn)]] = true; REACHABLE is bitmap of all regions that are used by the function
CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
int i;
struct eh_region *r;
for (i = cfun->eh->last_region_number; i > 0; --i) for (i = cfun->eh->last_region_number; i > 0; --i)
{ {
r = VEC_index (eh_region, cfun->eh->region_array, i); r = VEC_index (eh_region, cfun->eh->region_array, i);
if (r && r->region_number == i && !reachable[i]) if (!r)
continue;
if (r->region_number == i && !TEST_BIT (reachable, i) && !r->resume)
{ {
bool kill_it = true; bool kill_it = true;
r->tree_label = NULL;
switch (r->type) switch (r->type)
{ {
case ERT_THROW: case ERT_THROW:
/* Don't remove ERT_THROW regions if their outer region /* Don't remove ERT_THROW regions if their outer region
is reachable. */ is reachable. */
if (r->outer && reachable[r->outer->region_number]) if (r->outer && TEST_BIT (reachable, r->outer->region_number))
kill_it = false; kill_it = false;
break; break;
case ERT_MUST_NOT_THROW: case ERT_MUST_NOT_THROW:
/* MUST_NOT_THROW regions are implementable solely in the /* MUST_NOT_THROW regions are implementable solely in the
runtime, but their existence continues to affect calls runtime, but we need them when inlining function.
within that region. Never delete them here. */
kill_it = false; Keep them if outer region is not MUST_NOT_THROW a well
and if they contain some statement that might unwind through
them. */
if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
&& (!contains_stmt
|| can_be_reached_by_runtime (contains_stmt, r)))
kill_it = false;
break; break;
case ERT_TRY: case ERT_TRY:
{ {
/* TRY regions are reachable if any of its CATCH regions /* TRY regions are reachable if any of its CATCH regions
are reachable. */ are reachable. */
struct eh_region *c; struct eh_region *c;
for (c = r->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch) for (c = r->u.eh_try.eh_catch; c;
if (reachable[c->region_number]) c = c->u.eh_catch.next_catch)
if (TEST_BIT (reachable, c->region_number))
{ {
kill_it = false; kill_it = false;
break; break;
...@@ -697,11 +771,91 @@ remove_unreachable_regions (rtx insns) ...@@ -697,11 +771,91 @@ remove_unreachable_regions (rtx insns)
} }
if (kill_it) if (kill_it)
remove_eh_handler (r); {
if (dump_file)
fprintf (dump_file, "Removing unreachable eh region %i\n",
r->region_number);
remove_eh_handler (r);
}
} }
} }
#ifdef ENABLE_CHECKING
verify_eh_tree (cfun);
#endif
}
/* Return a heap-allocated vector mapping LABEL_DECL_UID to the number of
   the EH region whose tree_label is that label.  Entries for labels that
   are not any region's handler label remain 0 (the cleared default, which
   is safe because real region numbers are positive).  The caller owns and
   must free the returned vector.  */

VEC(int,heap) *
label_to_region_map (void)
{
  VEC(int,heap) * label_to_region = NULL;
  int i;

  VEC_safe_grow_cleared (int, heap, label_to_region,
			 cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
	{
	  unsigned uid = (unsigned) LABEL_DECL_UID (r->tree_label);

	  /* Fix off-by-one: the original grew the vector only to UID and
	     only when UID was strictly greater than the length, leaving
	     index UID out of range for the VEC_replace below.  Grow to
	     UID + 1 and also resize when UID equals the current length.  */
	  if (uid >= VEC_length (int, label_to_region))
	    VEC_safe_grow_cleared (int, heap, label_to_region, uid + 1);
	  VEC_replace (int, label_to_region, uid, i);
	}
    }
  return label_to_region;
}
/* Return the number of slots needed to index every EH region of the
   current function, i.e. the highest region number plus one.  */

int
num_eh_regions (void)
{
  int last_number = cfun->eh->last_region_number;

  return last_number + 1;
}
/* Remove all regions whose labels are not reachable from insns. */
static void
rtl_remove_unreachable_regions (rtx insns)
{
int i, *uid_region_num;
sbitmap reachable;
struct eh_region *r;
rtx insn;
uid_region_num = XCNEWVEC (int, get_max_uid ());
reachable = sbitmap_alloc (cfun->eh->last_region_number + 1);
sbitmap_zero (reachable);
free (reachable); for (i = cfun->eh->last_region_number; i > 0; --i)
{
r = VEC_index (eh_region, cfun->eh->region_array, i);
if (!r || r->region_number != i)
continue;
if (r->resume)
{
gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
uid_region_num[INSN_UID (r->resume)] = i;
}
if (r->label)
{
gcc_assert (!uid_region_num[INSN_UID (r->label)]);
uid_region_num[INSN_UID (r->label)] = i;
}
}
for (insn = insns; insn; insn = NEXT_INSN (insn))
SET_BIT (reachable, uid_region_num[INSN_UID (insn)]);
remove_unreachable_regions (reachable, NULL);
sbitmap_free (reachable);
free (uid_region_num); free (uid_region_num);
} }
...@@ -726,7 +880,7 @@ convert_from_eh_region_ranges (void) ...@@ -726,7 +880,7 @@ convert_from_eh_region_ranges (void)
region->label = DECL_RTL_IF_SET (region->tree_label); region->label = DECL_RTL_IF_SET (region->tree_label);
} }
remove_unreachable_regions (insns); rtl_remove_unreachable_regions (insns);
} }
static void static void
...@@ -2324,6 +2478,17 @@ maybe_remove_eh_handler (rtx label) ...@@ -2324,6 +2478,17 @@ maybe_remove_eh_handler (rtx label)
remove_eh_handler (region); remove_eh_handler (region);
} }
/* Remove EH region number R from the current function's EH tree.  Used
   when the region's handler has turned out to contain no code.  */

void
remove_eh_region (int r)
{
  remove_eh_handler (VEC_index (eh_region, cfun->eh->region_array, r));
}
/* Invokes CALLBACK for every exception handler label. Only used by old /* Invokes CALLBACK for every exception handler label. Only used by old
loop hackery; should not be used by new code. */ loop hackery; should not be used by new code. */
......
...@@ -19,6 +19,8 @@ You should have received a copy of the GNU General Public License ...@@ -19,6 +19,8 @@ You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */ <http://www.gnu.org/licenses/>. */
#include "sbitmap.h"
#include "vecprim.h"
struct function; struct function;
...@@ -61,6 +63,7 @@ extern void init_eh_for_function (void); ...@@ -61,6 +63,7 @@ extern void init_eh_for_function (void);
extern rtx reachable_handlers (rtx); extern rtx reachable_handlers (rtx);
extern void maybe_remove_eh_handler (rtx); extern void maybe_remove_eh_handler (rtx);
void remove_eh_region (int);
extern void convert_from_eh_region_ranges (void); extern void convert_from_eh_region_ranges (void);
extern unsigned int convert_to_eh_region_ranges (void); extern unsigned int convert_to_eh_region_ranges (void);
...@@ -174,3 +177,6 @@ struct throw_stmt_node GTY(()) ...@@ -174,3 +177,6 @@ struct throw_stmt_node GTY(())
extern struct htab *get_eh_throw_stmt_table (struct function *); extern struct htab *get_eh_throw_stmt_table (struct function *);
extern void set_eh_throw_stmt_table (struct function *, struct htab *); extern void set_eh_throw_stmt_table (struct function *, struct htab *);
extern void remove_unreachable_regions (sbitmap, sbitmap);
extern VEC(int,heap) * label_to_region_map (void);
extern int num_eh_regions (void);
...@@ -562,6 +562,7 @@ init_optimization_passes (void) ...@@ -562,6 +562,7 @@ init_optimization_passes (void)
NEXT_PASS (pass_simple_dse); NEXT_PASS (pass_simple_dse);
NEXT_PASS (pass_tail_recursion); NEXT_PASS (pass_tail_recursion);
NEXT_PASS (pass_convert_switch); NEXT_PASS (pass_convert_switch);
NEXT_PASS (pass_cleanup_eh);
NEXT_PASS (pass_profile); NEXT_PASS (pass_profile);
NEXT_PASS (pass_local_pure_const); NEXT_PASS (pass_local_pure_const);
} }
...@@ -589,6 +590,7 @@ init_optimization_passes (void) ...@@ -589,6 +590,7 @@ init_optimization_passes (void)
/* Initial scalar cleanups before alias computation. /* Initial scalar cleanups before alias computation.
They ensure memory accesses are not indirect wherever possible. */ They ensure memory accesses are not indirect wherever possible. */
NEXT_PASS (pass_strip_predict_hints); NEXT_PASS (pass_strip_predict_hints);
NEXT_PASS (pass_cleanup_eh);
NEXT_PASS (pass_update_address_taken); NEXT_PASS (pass_update_address_taken);
NEXT_PASS (pass_rename_ssa_copies); NEXT_PASS (pass_rename_ssa_copies);
NEXT_PASS (pass_complete_unrolli); NEXT_PASS (pass_complete_unrolli);
...@@ -686,6 +688,7 @@ init_optimization_passes (void) ...@@ -686,6 +688,7 @@ init_optimization_passes (void)
NEXT_PASS (pass_phi_only_cprop); NEXT_PASS (pass_phi_only_cprop);
NEXT_PASS (pass_cd_dce); NEXT_PASS (pass_cd_dce);
NEXT_PASS (pass_tracer); NEXT_PASS (pass_tracer);
NEXT_PASS (pass_cleanup_eh);
/* FIXME: If DCE is not run before checking for uninitialized uses, /* FIXME: If DCE is not run before checking for uninitialized uses,
we may get false warnings (e.g., testsuite/gcc.dg/uninit-5.c). we may get false warnings (e.g., testsuite/gcc.dg/uninit-5.c).
......
...@@ -2639,3 +2639,274 @@ struct gimple_opt_pass pass_refactor_eh = ...@@ -2639,3 +2639,274 @@ struct gimple_opt_pass pass_refactor_eh =
TODO_dump_func /* todo_flags_finish */ TODO_dump_func /* todo_flags_finish */
} }
}; };
/* Walk all statements of the current function and compute which EH regions
   are really referenced, then prune the EH tree of the rest.

   REACHABLE collects regions considered live: regions whose handler label
   sits in a block entered by an EH edge, and regions named by a RESX.
   CONTAINS_STMT collects regions that contain at least one statement with
   an EH region annotation (i.e. that may throw).  Both bitmaps are handed
   to remove_unreachable_regions, which does the actual pruning.  */

static void
tree_remove_unreachable_handlers (void)
{
  sbitmap reachable, contains_stmt;
  VEC(int,heap) * label_to_region;
  basic_block bb;

  label_to_region = label_to_region_map ();

  reachable = sbitmap_alloc (num_eh_regions ());
  sbitmap_zero (reachable);
  contains_stmt = sbitmap_alloc (num_eh_regions ());
  sbitmap_zero (contains_stmt);

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      int region;
      bool has_eh_preds = false;
      edge e;
      edge_iterator ei;

      /* Only labels in a block that is actually entered via an EH edge
	 mark their region as reachable.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->flags & EDGE_EH)
	  has_eh_preds = true;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) == GIMPLE_LABEL && has_eh_preds)
	    {
	      /* NOTE(review): LABEL_DECL_UID may be negative for labels
		 never entered in the map; presumably CFG labels always
		 have a UID here — confirm before relying on it.  */
	      int uid = LABEL_DECL_UID (gimple_label_label (stmt));
	      if (uid <= cfun->cfg->last_label_uid)
		{
		  int region = VEC_index (int, label_to_region, uid);
		  SET_BIT (reachable, region);
		}
	    }
	  /* Fixed: source said `RESX`, an undefined identifier; every
	     other gimple_code comparison in this file uses the
	     GIMPLE_RESX enumerator.  */
	  if (gimple_code (stmt) == GIMPLE_RESX)
	    SET_BIT (reachable, gimple_resx_region (stmt));
	  if ((region = lookup_stmt_eh_region (stmt)) >= 0)
	    SET_BIT (contains_stmt, region);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Before removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Reachable regions: ");
      dump_sbitmap_file (dump_file, reachable);
      fprintf (dump_file, "Regions containing insns: ");
      dump_sbitmap_file (dump_file, contains_stmt);
    }

  remove_unreachable_regions (reachable, contains_stmt);
  sbitmap_free (reachable);
  sbitmap_free (contains_stmt);
  VEC_free (int, heap, label_to_region);

  if (dump_file)
    {
      fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "\n\n");
    }
}
/* Pattern match an empty EH receiver, i.e. a handler block that does
   nothing but re-raise the exception it received.  Such a block looks
   like:

     save_filt.6352_662 = [filter_expr] <<<filter object>>>;
     save_eptr.6351_663 = [exc_ptr_expr] <<<exception object>>>;
     <<<exception object>>> = save_eptr.6351_663;
     <<<filter object>>> = save_filt.6352_662;
     resx 1

   Statements are matched backwards starting from the trailing RESX.
   Return the RESX's region number when BB matches, otherwise 0 (real
   region numbers are positive, so 0 safely means "no match").  */

static int
tree_empty_eh_handler_p (basic_block bb)
{
  gimple_stmt_iterator gsi;
  int region;

  gsi = gsi_last_bb (bb);

  /* The block must end in a RESX.  */
  if (gsi_end_p (gsi))
    return 0;
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_RESX)
    return 0;
  region = gimple_resx_region (gsi_stmt (gsi));

  /* Store into the filter object.  */
  gsi_prev (&gsi);
  if (gsi_end_p (gsi))
    return 0;
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
    return 0;
  if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != FILTER_EXPR)
    return 0;

  /* Store into the exception pointer object.  */
  gsi_prev (&gsi);
  if (gsi_end_p (gsi))
    return 0;
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
    return 0;
  if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != EXC_PTR_EXPR)
    return 0;

  /* Load of the exception pointer object.  */
  gsi_prev (&gsi);
  if (gsi_end_p (gsi))
    return 0;
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
    return 0;
  if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != EXC_PTR_EXPR)
    return 0;

  /* Load of the filter object.  */
  gsi_prev (&gsi);
  if (gsi_end_p (gsi))
    return 0;
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
    return 0;
  if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != FILTER_EXPR)
    return 0;

  /* Finally the handler label itself; nothing else may precede it.  */
  gsi_prev (&gsi);
  if (gsi_end_p (gsi))
    return 0;
  if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
    return region;
  else
    return 0;
}
/* Set whenever cleanup_empty_eh removes or redirects an EH edge; the
   caller (cleanup_eh) then frees the stale dominance info.  */
static bool dominance_info_invalidated;

/* Look for basic blocks containing empty exception handler and remove them.
   This is similar to jump forwarding, just across EH edges.
   Return true when BB was such a handler and has been deleted.  */

static bool
cleanup_empty_eh (basic_block bb)
{
  int region;

  /* When handler of EH region winds up to be empty, we can safely
     remove it.  This leads to inner EH regions to be redirected
     to outer one, if present in function.  So we need to rebuild
     EH edges in all sources.  */
  if ((region = tree_empty_eh_handler_p (bb)))
    {
      edge_iterator ei;
      edge e;
      gimple_stmt_iterator si;

      remove_eh_region (region);

      /* It is safe to mark symbol for renaming because we have abnormal PHI
	 here.  Once EH edges are made redirectable we might need to add here
	 similar updating as jump threading does.  */
      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (gsi_stmt (si))));

      /* Peel predecessors off one at a time; each iteration removes the
	 edge(s) from one source block, so the pred list shrinks.  */
      while ((e = ei_safe_edge (ei_start (bb->preds))))
	{
	  basic_block src = e->src;

	  gcc_assert (e->flags & EDGE_EH);

	  /* Drop every EH successor edge of SRC; non-EH edges are kept.  */
	  for (ei = ei_start (src->succs); (e = ei_safe_edge (ei));)
	    {
	      if (e->flags & EDGE_EH)
		{
		  remove_edge (e);
		  dominance_info_invalidated = true;
		}
	      else
		ei_next (&ei);
	    }

	  /* If SRC can no longer throw (its region was just removed and no
	     outer one catches), there is nothing to rebuild.  */
	  if (!stmt_can_throw_internal (last_stmt (src)))
	    continue;

	  /* Rebuild SRC's EH edges against the pruned region tree, and mark
	     PHI symbols in the new destinations for renaming as above.  */
	  make_eh_edges (last_stmt (src));
	  FOR_EACH_EDGE (e, ei, src->succs)
	    if (e->flags & EDGE_EH)
	      {
		dominance_info_invalidated = true;
		for (si = gsi_start_phis (e->dest); !gsi_end_p (si);
		     gsi_next (&si))
		  mark_sym_for_renaming (SSA_NAME_VAR
					 (PHI_RESULT (gsi_stmt (si))));
	      }
	}
      if (dump_file)
	fprintf (dump_file, "Empty EH handler %i removed\n", region);
      delete_basic_block (bb);
      return true;
    }
  return false;
}
/* Perform cleanups and lowering of exception handling
    1) cleanups regions with handlers doing nothing are optimized out
    2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
    3) Info about regions that are containing instructions, and regions
       reachable via local EH edges is collected
    4) EH tree is pruned for regions no longer necessary.

   Return TODO flags: cleanup-cfg and SSA update when any handler was
   removed, 0 otherwise.  */

static unsigned int
cleanup_eh (void)
{
  bool changed = false;
  basic_block bb;
  int i;

  /* Nothing to do if the function carries no EH data at all.  */
  if (!cfun->eh)
    return 0;
  if (dump_file)
    {
      fprintf (dump_file, "Before cleanups:\n");
      dump_eh_tree (dump_file, cfun);
    }

  dominance_info_invalidated = false;
  /* We cannot use FOR_EACH_BB, since the basic blocks may get removed.  */
  for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
    {
      bb = BASIC_BLOCK (i);
      if (bb)
	changed |= cleanup_empty_eh (bb);
    }

  /* cleanup_empty_eh set this flag whenever it touched EH edges.  */
  if (dominance_info_invalidated)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
    }

  /* Removing contained cleanup can render MUST_NOT_THROW regions empty.  */
  if (changed)
    delete_unreachable_blocks ();

  tree_remove_unreachable_handlers ();
  if (dump_file)
    {
      fprintf (dump_file, "After cleanups:\n");
      dump_eh_tree (dump_file, cfun);
    }

  return (changed ? TODO_cleanup_cfg | TODO_update_ssa : 0);
}
/* Pass descriptor for the EH cleanup pass; scheduled several times by
   init_optimization_passes in passes.c.  No gate: runs whenever scheduled
   (cleanup_eh itself bails out early when cfun->eh is NULL).  */
struct gimple_opt_pass pass_cleanup_eh = {
  {
    GIMPLE_PASS,
    "ehcleanup",            /* name */
    NULL,                   /* gate */
    cleanup_eh,             /* execute */
    NULL,                   /* sub */
    NULL,                   /* next */
    0,                      /* static_pass_number */
    TV_TREE_EH,             /* tv_id */
    PROP_gimple_lcf,        /* properties_required */
    0,                      /* properties_provided */
    0,                      /* properties_destroyed */
    0,                      /* todo_flags_start */
    TODO_dump_func          /* todo_flags_finish */
  }
};
...@@ -309,6 +309,7 @@ extern struct gimple_opt_pass pass_tree_profile; ...@@ -309,6 +309,7 @@ extern struct gimple_opt_pass pass_tree_profile;
extern struct gimple_opt_pass pass_early_tree_profile; extern struct gimple_opt_pass pass_early_tree_profile;
extern struct gimple_opt_pass pass_cleanup_cfg; extern struct gimple_opt_pass pass_cleanup_cfg;
extern struct gimple_opt_pass pass_referenced_vars; extern struct gimple_opt_pass pass_referenced_vars;
extern struct gimple_opt_pass pass_cleanup_eh;
extern struct gimple_opt_pass pass_fixup_cfg; extern struct gimple_opt_pass pass_fixup_cfg;
extern struct gimple_opt_pass pass_sra; extern struct gimple_opt_pass pass_sra;
extern struct gimple_opt_pass pass_sra_early; extern struct gimple_opt_pass pass_sra_early;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment