Commit f91a0beb by Kenneth Zadeck Committed by Kenneth Zadeck

cfganal.c (flow_reverse_top_sort_order_compute): Renamed to post_order_compute…

cfganal.c (flow_reverse_top_sort_order_compute): Renamed to post_order_compute and additional parameter added which allows the...

2005-12-20  Kenneth Zadeck <zadeck@naturalbridge.com>

	* cfganal.c (flow_reverse_top_sort_order_compute):
	Renamed to post_order_compute and additional parameter added which
	allows the inclusion of entry and exit blocks into list.
	(mark_dfs_back_edges): Fixed comment.
	(flow_depth_first_order_compute): Renamed to
	pre_and_rev_post_order_compute and additional parameter added which
	allows the inclusion of entry and exit blocks into list.
	* global.c (set_up_bb_rts_numbers): Call to
	flow_reverse_top_sort_order_compute renamed to
	post_order_compute.
	* var-tracking.c (vt_stack_adjustments): Fixed comment.
	(vt_find_locations): Call to
	flow_depth_first_order_compute renamed to
	pre_and_rev_post_order_compute.
	* cfgloop.c (flow_find_loops): Ditto.
	* tree-ssa-reassoc.c (init_reassoc): Ditto.
	* df.c (df_analyze_1, df_analyze_subcfg): Calls to
	flow_depth_first_order_compute renamed to
	pre_and_rev_post_order_compute and calls to
	flow_reverse_top_sort_order_compute renamed to
	post_order_compute.
	* basic_block.h: Ditto.

From-SVN: r108874
parent df239d2b
2005-12-20 Kenneth Zadeck <zadeck@naturalbridge.com>
* cfganal.c (flow_reverse_top_sort_order_compute):
Renamed to post_order_compute and additional parameter added which
allows the inclusion of entry and exit blocks into list.
(mark_dfs_back_edges): Fixed comment.
(flow_depth_first_order_compute): Renamed to
pre_and_rev_post_order_compute and additional parameter added which
allows the inclusion of entry and exit blocks into list.
* global.c (set_up_bb_rts_numbers): Call to
flow_reverse_top_sort_order_compute renamed to
post_order_compute.
* var-tracking.c (vt_stack_adjustments): Fixed comment.
(vt_find_locations): Call to
flow_depth_first_order_compute renamed to
pre_and_rev_post_order_compute.
* cfgloop.c (flow_find_loops): Ditto.
* tree-ssa-reassoc.c (init_reassoc): Ditto.
* df.c (df_analyze_1, df_analyze_subcfg): Calls to
flow_depth_first_order_compute renamed to
pre_and_rev_post_order_compute and calls to
flow_reverse_top_sort_order_compute renamed to
post_order_compute.
* basic_block.h: Ditto.
2005-12-20 Roger Sayle <roger@eyesopen.com>
            Paolo Bonzini <bonzini@gnu.org>
......
...@@ -503,8 +503,8 @@ extern edge redirect_edge_succ_nodup (edge, basic_block); ...@@ -503,8 +503,8 @@ extern edge redirect_edge_succ_nodup (edge, basic_block);
extern void redirect_edge_pred (edge, basic_block); extern void redirect_edge_pred (edge, basic_block);
extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block); extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
extern void clear_bb_flags (void); extern void clear_bb_flags (void);
extern void flow_reverse_top_sort_order_compute (int *); extern int post_order_compute (int *, bool);
extern int flow_depth_first_order_compute (int *, int *); extern int pre_and_rev_post_order_compute (int *, int *, bool);
extern int dfs_enumerate_from (basic_block, int, extern int dfs_enumerate_from (basic_block, int,
bool (*)(basic_block, void *), bool (*)(basic_block, void *),
basic_block *, int, void *); basic_block *, int, void *);
......
...@@ -152,7 +152,7 @@ could_fall_through (basic_block src, basic_block target) ...@@ -152,7 +152,7 @@ could_fall_through (basic_block src, basic_block target)
Steven Muchnick Steven Muchnick
Morgan Kaufmann, 1997 Morgan Kaufmann, 1997
and heavily borrowed from flow_depth_first_order_compute. */ and heavily borrowed from pre_and_rev_post_order_compute. */
bool bool
mark_dfs_back_edges (void) mark_dfs_back_edges (void)
...@@ -645,16 +645,20 @@ connect_infinite_loops_to_exit (void) ...@@ -645,16 +645,20 @@ connect_infinite_loops_to_exit (void)
return; return;
} }
/* Compute reverse top sort order. */ /* Compute reverse top sort order.
This is computing a post order numbering of the graph. */
void int
flow_reverse_top_sort_order_compute (int *rts_order) post_order_compute (int *post_order, bool include_entry_exit)
{ {
edge_iterator *stack; edge_iterator *stack;
int sp; int sp;
int postnum = 0; int post_order_num = 0;
sbitmap visited; sbitmap visited;
if (include_entry_exit)
post_order[post_order_num++] = EXIT_BLOCK;
/* Allocate stack for back-tracking up CFG. */ /* Allocate stack for back-tracking up CFG. */
stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator)); stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
sp = 0; sp = 0;
...@@ -690,12 +694,12 @@ flow_reverse_top_sort_order_compute (int *rts_order) ...@@ -690,12 +694,12 @@ flow_reverse_top_sort_order_compute (int *rts_order)
time, check its successors. */ time, check its successors. */
stack[sp++] = ei_start (dest->succs); stack[sp++] = ei_start (dest->succs);
else else
rts_order[postnum++] = dest->index; post_order[post_order_num++] = dest->index;
} }
else else
{ {
if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR) if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
rts_order[postnum++] = src->index; post_order[post_order_num++] = src->index;
if (!ei_one_before_end_p (ei)) if (!ei_one_before_end_p (ei))
ei_next (&stack[sp - 1]); ei_next (&stack[sp - 1]);
...@@ -704,30 +708,50 @@ flow_reverse_top_sort_order_compute (int *rts_order) ...@@ -704,30 +708,50 @@ flow_reverse_top_sort_order_compute (int *rts_order)
} }
} }
if (include_entry_exit)
post_order[post_order_num++] = ENTRY_BLOCK;
free (stack); free (stack);
sbitmap_free (visited); sbitmap_free (visited);
return post_order_num;
} }
/* Compute the depth first search order and store in the array /* Compute the depth first search order and store in the array
DFS_ORDER if nonzero, marking the nodes visited in VISITED. If PRE_ORDER if nonzero, marking the nodes visited in VISITED. If
RC_ORDER is nonzero, return the reverse completion number for each REV_POST_ORDER is nonzero, return the reverse completion number for each
node. Returns the number of nodes visited. A depth first search node. Returns the number of nodes visited. A depth first search
tries to get as far away from the starting point as quickly as tries to get as far away from the starting point as quickly as
possible. */ possible.
pre_order is really a preorder numbering of the graph.
rev_post_order is really a reverse postorder numbering of the graph.
*/
int int
flow_depth_first_order_compute (int *dfs_order, int *rc_order) pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
bool include_entry_exit)
{ {
edge_iterator *stack; edge_iterator *stack;
int sp; int sp;
int dfsnum = 0; int pre_order_num = 0;
int rcnum = n_basic_blocks - 1 - NUM_FIXED_BLOCKS; int rev_post_order_num = n_basic_blocks - 1;
sbitmap visited; sbitmap visited;
/* Allocate stack for back-tracking up CFG. */ /* Allocate stack for back-tracking up CFG. */
stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator)); stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
sp = 0; sp = 0;
if (include_entry_exit)
{
if (pre_order)
pre_order[pre_order_num] = ENTRY_BLOCK;
pre_order_num++;
if (rev_post_order)
rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
}
else
rev_post_order_num -= NUM_FIXED_BLOCKS;
/* Allocate bitmap to track nodes that have been visited. */ /* Allocate bitmap to track nodes that have been visited. */
visited = sbitmap_alloc (last_basic_block); visited = sbitmap_alloc (last_basic_block);
...@@ -754,27 +778,27 @@ flow_depth_first_order_compute (int *dfs_order, int *rc_order) ...@@ -754,27 +778,27 @@ flow_depth_first_order_compute (int *dfs_order, int *rc_order)
/* Mark that we have visited the destination. */ /* Mark that we have visited the destination. */
SET_BIT (visited, dest->index); SET_BIT (visited, dest->index);
if (dfs_order) if (pre_order)
dfs_order[dfsnum] = dest->index; pre_order[pre_order_num] = dest->index;
dfsnum++; pre_order_num++;
if (EDGE_COUNT (dest->succs) > 0) if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first /* Since the DEST node has been visited for the first
time, check its successors. */ time, check its successors. */
stack[sp++] = ei_start (dest->succs); stack[sp++] = ei_start (dest->succs);
else if (rc_order) else if (rev_post_order)
/* There are no successors for the DEST node so assign /* There are no successors for the DEST node so assign
its reverse completion number. */ its reverse completion number. */
rc_order[rcnum--] = dest->index; rev_post_order[rev_post_order_num--] = dest->index;
} }
else else
{ {
if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR
&& rc_order) && rev_post_order)
/* There are no more successors for the SRC node /* There are no more successors for the SRC node
so assign its reverse completion number. */ so assign its reverse completion number. */
rc_order[rcnum--] = src->index; rev_post_order[rev_post_order_num--] = src->index;
if (!ei_one_before_end_p (ei)) if (!ei_one_before_end_p (ei))
ei_next (&stack[sp - 1]); ei_next (&stack[sp - 1]);
...@@ -786,11 +810,22 @@ flow_depth_first_order_compute (int *dfs_order, int *rc_order) ...@@ -786,11 +810,22 @@ flow_depth_first_order_compute (int *dfs_order, int *rc_order)
free (stack); free (stack);
sbitmap_free (visited); sbitmap_free (visited);
/* The number of nodes visited should be the number of blocks minus if (include_entry_exit)
the entry and exit blocks which are not visited here. */ {
gcc_assert (dfsnum == n_basic_blocks - NUM_FIXED_BLOCKS); if (pre_order)
pre_order[pre_order_num] = EXIT_BLOCK;
pre_order_num++;
if (rev_post_order)
rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
/* The number of nodes visited should be the number of blocks. */
gcc_assert (pre_order_num == n_basic_blocks);
}
else
/* The number of nodes visited should be the number of blocks minus
the entry and exit blocks which are not visited here. */
gcc_assert (pre_order_num == n_basic_blocks - NUM_FIXED_BLOCKS);
return dfsnum; return pre_order_num;
} }
/* Compute the depth first search order on the _reverse_ graph and /* Compute the depth first search order on the _reverse_ graph and
......
...@@ -696,7 +696,7 @@ flow_loops_find (struct loops *loops) ...@@ -696,7 +696,7 @@ flow_loops_find (struct loops *loops)
natural loops will be found before inner natural loops. */ natural loops will be found before inner natural loops. */
dfs_order = xmalloc (n_basic_blocks * sizeof (int)); dfs_order = xmalloc (n_basic_blocks * sizeof (int));
rc_order = xmalloc (n_basic_blocks * sizeof (int)); rc_order = xmalloc (n_basic_blocks * sizeof (int));
flow_depth_first_order_compute (dfs_order, rc_order); pre_and_rev_post_order_compute (dfs_order, rc_order, false);
/* Save CFG derived information to avoid recomputing it. */ /* Save CFG derived information to avoid recomputing it. */
loops->cfg.dfs_order = dfs_order; loops->cfg.dfs_order = dfs_order;
......
...@@ -1996,8 +1996,8 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update) ...@@ -1996,8 +1996,8 @@ df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
df->rc_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS); df->rc_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
df->rts_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS); df->rts_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
flow_depth_first_order_compute (df->dfs_order, df->rc_order); pre_and_rev_post_order_compute (df->dfs_order, df->rc_order, false);
flow_reverse_top_sort_order_compute (df->rts_order); post_order_compute (df->rts_order, false);
if (aflags & DF_RD) if (aflags & DF_RD)
{ {
/* Compute the sets of gens and kills for the defs of each bb. */ /* Compute the sets of gens and kills for the defs of each bb. */
...@@ -2424,8 +2424,8 @@ df_analyze_subcfg (struct df *df, bitmap blocks, int flags) ...@@ -2424,8 +2424,8 @@ df_analyze_subcfg (struct df *df, bitmap blocks, int flags)
df->rc_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS); df->rc_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
df->rts_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS); df->rts_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
flow_depth_first_order_compute (df->dfs_order, df->rc_order); pre_and_rev_post_order_compute (df->dfs_order, df->rc_order, false);
flow_reverse_top_sort_order_compute (df->rts_order); post_order_compute (df->rts_order, false);
n_blocks = prune_to_subcfg (df->dfs_order, n_basic_blocks - NUM_FIXED_BLOCKS, blocks); n_blocks = prune_to_subcfg (df->dfs_order, n_basic_blocks - NUM_FIXED_BLOCKS, blocks);
prune_to_subcfg (df->rc_order, n_basic_blocks - NUM_FIXED_BLOCKS, blocks); prune_to_subcfg (df->rc_order, n_basic_blocks - NUM_FIXED_BLOCKS, blocks);
......
...@@ -2282,7 +2282,7 @@ set_up_bb_rts_numbers (void) ...@@ -2282,7 +2282,7 @@ set_up_bb_rts_numbers (void)
int *rts_order; int *rts_order;
rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS)); rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
flow_reverse_top_sort_order_compute (rts_order); post_order_compute (rts_order, false);
for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++) for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
BB_INFO_BY_INDEX (rts_order [i])->rts_number = i; BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
free (rts_order); free (rts_order);
......
...@@ -1424,7 +1424,7 @@ init_reassoc (void) ...@@ -1424,7 +1424,7 @@ init_reassoc (void)
/* Reverse RPO (Reverse Post Order) will give us something where /* Reverse RPO (Reverse Post Order) will give us something where
deeper loops come later. */ deeper loops come later. */
flow_depth_first_order_compute (NULL, bbs); pre_and_rev_post_order_compute (NULL, bbs, false);
bb_rank = xcalloc (last_basic_block + 1, sizeof (unsigned int)); bb_rank = xcalloc (last_basic_block + 1, sizeof (unsigned int));
operand_rank = htab_create (511, operand_entry_hash, operand_rank = htab_create (511, operand_entry_hash,
......
...@@ -485,7 +485,7 @@ bb_stack_adjust_offset (basic_block bb) ...@@ -485,7 +485,7 @@ bb_stack_adjust_offset (basic_block bb)
/* Compute stack adjustments for all blocks by traversing DFS tree. /* Compute stack adjustments for all blocks by traversing DFS tree.
Return true when the adjustments on all incoming edges are consistent. Return true when the adjustments on all incoming edges are consistent.
Heavily borrowed from flow_depth_first_order_compute. */ Heavily borrowed from pre_and_rev_post_order_compute. */
static bool static bool
vt_stack_adjustments (void) vt_stack_adjustments (void)
...@@ -1640,7 +1640,7 @@ vt_find_locations (void) ...@@ -1640,7 +1640,7 @@ vt_find_locations (void)
so that the data-flow runs faster. */ so that the data-flow runs faster. */
rc_order = xmalloc ((n_basic_blocks - NUM_FIXED_BLOCKS) * sizeof (int)); rc_order = xmalloc ((n_basic_blocks - NUM_FIXED_BLOCKS) * sizeof (int));
bb_order = xmalloc (last_basic_block * sizeof (int)); bb_order = xmalloc (last_basic_block * sizeof (int));
flow_depth_first_order_compute (NULL, rc_order); pre_and_rev_post_order_compute (NULL, rc_order, false);
for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++) for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
bb_order[rc_order[i]] = i; bb_order[rc_order[i]] = i;
free (rc_order); free (rc_order);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment