Commit 65833da6 by Richard Biener, committed by Richard Biener

tree-ssa-pre.c (postorder, [...]): Make locals ...

2016-04-18  Richard Biener  <rguenther@suse.de>

	* tree-ssa-pre.c (postorder, postorder_num): Make locals ...
	(compute_antic): ... here.  For partial antic use regular
	postorder and scrap iteration.
	(compute_partial_antic_aux): Remove unused return value.
	(init_pre): Do not allocate postorder.
	(fini_pre): Do not free postorder.

From-SVN: r235130
parent b269f477
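
In outline: POSTORDER and POSTORDER_NUM stop being file-scope globals that init_pre allocates and fini_pre frees; compute_antic now computes, reuses and frees the array itself. A simplified sketch of the resulting shape of compute_antic, with the loops elided (the real code is in the hunks below):

static void
compute_antic (void)
{
  /* Inverted-CFG RPO for the iterated ANTIC computation.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = inverted_post_order_compute (postorder);

  /* ... iterate compute_antic_aux over POSTORDER until nothing changes ... */

  if (do_partial_partial)
    {
      /* Reuse the array: a regular RPO, walked from the end, gives a
	 postorder in which one sweep suffices for partial antic.  */
      postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
      /* ... single pass of compute_partial_antic_aux ... */
    }

  free (postorder);
}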
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -432,10 +432,6 @@ typedef struct bb_bitmap_sets
 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
 
-/* Basic block list in postorder.  */
-static int *postorder;
-static int postorder_num;
-
 /* This structure is used to keep track of statistics on what
    optimization PRE was able to perform.  */
 static struct
@@ -2209,11 +2205,10 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
      - ANTIC_IN[BLOCK])
 */
-static bool
+static void
 compute_partial_antic_aux (basic_block block,
 			   bool block_has_abnormal_pred_edge)
 {
-  bool changed = false;
   bitmap_set_t old_PA_IN;
   bitmap_set_t PA_OUT;
   edge e;
@@ -2312,9 +2307,6 @@ compute_partial_antic_aux (basic_block block,
   dependent_clean (PA_IN (block), ANTIC_IN (block));
 
-  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
-    changed = true;
-
  maybe_dump_sets:
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -2327,7 +2319,6 @@ compute_partial_antic_aux (basic_block block,
   bitmap_set_free (old_PA_IN);
   if (PA_OUT)
     bitmap_set_free (PA_OUT);
-  return changed;
 }
 
 /* Compute ANTIC and partial ANTIC sets.  */
@@ -2349,23 +2340,33 @@ compute_antic (void)
   FOR_ALL_BB_FN (block, cfun)
     {
+      BB_VISITED (block) = 0;
+
       FOR_EACH_EDGE (e, ei, block->preds)
 	if (e->flags & EDGE_ABNORMAL)
 	  {
 	    bitmap_set_bit (has_abnormal_preds, block->index);
+
+	    /* We also anticipate nothing.  */
+	    BB_VISITED (block) = 1;
 	    break;
 	  }
 
-      BB_VISITED (block) = 0;
-
       /* While we are here, give empty ANTIC_IN sets to each block.  */
       ANTIC_IN (block) = bitmap_set_new ();
       if (do_partial_partial)
 	PA_IN (block) = bitmap_set_new ();
     }
 
   /* At the exit block we anticipate nothing.  */
   BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
 
+  /* For ANTIC computation we need a postorder that also guarantees that
+     a block with a single successor is visited after its successor.
+     RPO on the inverted CFG has this property.  */
+  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
+  int postorder_num = inverted_post_order_compute (postorder);
+
   sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
   bitmap_ones (worklist);
   while (changed)
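
The new comment above states the ordering invariant the ANTIC iteration relies on. Here is a minimal standalone illustration of it, not GCC code: DFS the inverted CFG from the exit block, record blocks in postorder, and walk the array from the end, which is how compute_antic consumes it. The CFG 0 -> 1, 1 -> {2, 3}, 2 -> 1 contains a loop whose latch, block 2, has the single successor 1; in the printed order, 1 comes before 2. All names here (pred, dfs, order) are invented for the toy; GCC's inverted_post_order_compute additionally copes with blocks that cannot reach the exit, which this toy ignores.

#include <stdio.h>

#define NBLOCKS 4
#define EXIT_BLK 3

/* pred[b] lists the predecessors of block b, i.e. the successors of b
   in the inverted CFG; -1 terminates each list.  */
static const int pred[NBLOCKS][3] = {
  { -1, -1, -1 },	/* 0: entry */
  { 0, 2, -1 },		/* 1: loop header */
  { 1, -1, -1 },	/* 2: latch, single successor 1 */
  { 1, -1, -1 },	/* 3: exit */
};

static int order[NBLOCKS], n;
static int visited[NBLOCKS];

/* DFS on the inverted CFG, recording blocks in postorder.  */
static void
dfs (int b)
{
  visited[b] = 1;
  for (int j = 0; pred[b][j] != -1; j++)
    if (!visited[pred[b][j]])
      dfs (pred[b][j]);
  order[n++] = b;
}

int
main (void)
{
  dfs (EXIT_BLK);
  /* Walking ORDER from the end yields RPO on the inverted CFG,
     here 3 1 2 0: block 2 is visited after its single successor 1.  */
  printf ("processing order:");
  for (int i = n - 1; i >= 0; i--)
    printf (" %d", order[i]);
  printf ("\n");
  return 0;
}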
@@ -2403,39 +2404,21 @@ compute_antic (void)
   if (do_partial_partial)
     {
-      bitmap_ones (worklist);
-      num_iterations = 0;
-      changed = true;
-      while (changed)
-	{
-	  if (dump_file && (dump_flags & TDF_DETAILS))
-	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
-	  num_iterations++;
-	  changed = false;
-	  for (i = postorder_num - 1 ; i >= 0; i--)
-	    {
-	      if (bitmap_bit_p (worklist, postorder[i]))
-		{
-		  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
-		  bitmap_clear_bit (worklist, block->index);
-		  if (compute_partial_antic_aux (block,
-						 bitmap_bit_p (has_abnormal_preds,
-							       block->index)))
-		    {
-		      FOR_EACH_EDGE (e, ei, block->preds)
-			bitmap_set_bit (worklist, e->src->index);
-		      changed = true;
-		    }
-		}
-	    }
-	  /* Theoretically possible, but *highly* unlikely.  */
-	  gcc_checking_assert (num_iterations < 500);
-	}
-      statistics_histogram_event (cfun, "compute_partial_antic iterations",
-				  num_iterations);
+      /* For partial antic we ignore backedges and thus we do not need
+	 to perform any iteration when we process blocks in postorder.  */
+      postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
+      for (i = postorder_num - 1 ; i >= 0; i--)
+	{
+	  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
+	  compute_partial_antic_aux (block,
+				     bitmap_bit_p (has_abnormal_preds,
+						   block->index));
+	}
     }
 
   sbitmap_free (has_abnormal_preds);
   sbitmap_free (worklist);
+  free (postorder);
 }
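
Why the iteration can be scrapped: the partial-antic equations ignore backedges, so the edges that remain form a DAG, and a single sweep that visits every block after its remaining successors already computes the fixed point. Below is a hedged standalone illustration on a diamond CFG, not GCC code: it uses an intersection over successors in the spirit of ANTIC rather than PRE's exact PA_OUT equations, and all names (succ, gen, in) are invented for the toy. After the one pass, no block's set would change if the loop ran again.

#include <stdio.h>

#define NBLOCKS 4

/* Diamond CFG: 0 -> {1, 2}, 1 -> 3, 2 -> 3, block 3 is the exit.
   succ[b] lists the successors of b; -1 terminates each list.  */
static const int succ[NBLOCKS][3] = {
  { 1, 2, -1 },
  { 3, -1, -1 },
  { 3, -1, -1 },
  { -1, -1, -1 },
};

/* Expressions locally anticipated in each block, as bitmasks.  */
static const unsigned gen[NBLOCKS] = { 0x1, 0x2, 0x2, 0x4 };

int
main (void)
{
  /* A postorder of this (acyclic) CFG: every successor precedes the
     block itself.  This is the order the new loop visits blocks in.  */
  static const int postorder[NBLOCKS] = { 3, 1, 2, 0 };
  unsigned in[NBLOCKS] = { 0 };

  for (int i = 0; i < NBLOCKS; i++)
    {
      int b = postorder[i];
      unsigned out = ~0u;
      int nsucc = 0;
      for (int j = 0; succ[b][j] != -1; j++, nsucc++)
	out &= in[succ[b][j]];	/* Meet over successors' IN sets,
				   all of which are already final.  */
      if (nsucc == 0)
	out = 0;		/* The exit anticipates nothing.  */
      in[b] = gen[b] | out;	/* One visit per block is enough.  */
      printf ("block %d: IN = 0x%x\n", b, in[b]);
    }
  return 0;
}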
@@ -4694,12 +4677,6 @@ init_pre (void)
   connect_infinite_loops_to_exit ();
   memset (&pre_stats, 0, sizeof (pre_stats));
 
-  /* For ANTIC computation we need a postorder that also guarantees that
-     a block with a single successor is visited after its successor.
-     RPO on the inverted CFG has this property.  */
-  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-  postorder_num = inverted_post_order_compute (postorder);
-
   alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
 
   calculate_dominance_info (CDI_DOMINATORS);
......@@ -4722,7 +4699,6 @@ init_pre (void)
static void
fini_pre ()
{
free (postorder);
value_expressions.release ();
BITMAP_FREE (inserted_exprs);
bitmap_obstack_release (&grand_bitmap_obstack);