Commit c6bb4c93 by Vladimir Makarov, committed by Vladimir Makarov

re PR rtl-optimization/38280 (Revision 142207 breaks 416.gamess/481.wrf in SPEC CPU 2006)

2008-12-01  Vladimir Makarov  <vmakarov@redhat.com>

	PR rtl-optimization/38280
	* ira-build.c (loop_is_inside_p, regno_allocno_order_compare_func,
	ira_rebuild_regno_allocno_list): New functions.
	(regno_allocnos): New static variable.
	(remove_unnecessary_allocnos): Allocate/deallocate regno_allocnos.
	Call ira_rebuild_regno_allocno_list.

From-SVN: r142336
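For orientation before the diff: the fix restores the per-regno allocno list by copying it into an array, sorting it with qsort, and relinking the chain, which is what the new ira_rebuild_regno_allocno_list below does. The following is a minimal standalone sketch of that copy/sort/relink pattern; the struct node type, its depth field and the helper names are hypothetical stand-ins for IRA's allocno and loop-tree structures, and the plain nesting-depth comparison simplifies the ancestor test the real comparator performs via loop_is_inside_p.

/* Hypothetical sketch of the copy/qsort/relink pattern used by the new
   ira_rebuild_regno_allocno_list; not IRA's real data structures.  */
#include <stdlib.h>

struct node
{
  int num;            /* creation order, like ALLOCNO_NUM */
  int depth;          /* loop nesting depth of the node's region */
  struct node *next;  /* singly linked list, like the regno allocno list */
};

/* Deeper regions first; among equal depths, higher num first so the
   result of qsort is deterministic.  */
static int
node_order_compare (const void *v1p, const void *v2p)
{
  const struct node *n1 = *(const struct node *const *) v1p;
  const struct node *n2 = *(const struct node *const *) v2p;

  if (n1->depth != n2->depth)
    return n2->depth - n1->depth;
  return n2->num - n1->num;
}

/* Copy the list starting at HEAD into BUF, sort it, relink the nodes in
   the sorted order and return the new head.  BUF must have room for
   every node on the list.  */
static struct node *
rebuild_list (struct node *head, struct node **buf)
{
  int i, n = 0;
  struct node *p;

  if (head == NULL)
    return NULL;
  for (p = head; p != NULL; p = p->next)
    buf[n++] = p;
  qsort (buf, n, sizeof (struct node *), node_order_compare);
  for (i = 1; i < n; i++)
    buf[i - 1]->next = buf[i];
  buf[n - 1]->next = NULL;
  return buf[0];
}

The explicit secondary key mirrors the comment in regno_allocno_order_compare_func below: qsort gives no stability guarantee, so ties are broken deterministically by node number.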
gcc/ChangeLog
+2008-12-01  Vladimir Makarov  <vmakarov@redhat.com>
+
+	PR rtl-optimization/38280
+	* ira-build.c (loop_is_inside_p, regno_allocno_order_compare_func,
+	ira_rebuild_regno_allocno_list): New functions.
+	(regno_allocnos): New static variable.
+	(remove_unnecessary_allocnos): Allocate/deallocate regno_allocnos.
+	Call ira_rebuild_regno_allocno_list.
+
 2008-12-01  David Daney  <ddaney@caviumnetworks.com>
 	    Adam Nemet  <anemet@caviumnetworks.com>
gcc/ira-build.c
@@ -1800,100 +1800,172 @@ remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
     }
 }
 
+/* Return TRUE if NODE is inside PARENT.  */
+static bool
+loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
+{
+  for (node = node->parent; node != NULL; node = node->parent)
+    if (node == parent)
+      return true;
+  return false;
+}
+
+/* Sort allocnos according to their order in regno allocno list.  */
+static int
+regno_allocno_order_compare_func (const void *v1p, const void *v2p)
+{
+  ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
+  ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
+  ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
+  ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
+
+  if (loop_is_inside_p (n1, n2))
+    return -1;
+  else if (loop_is_inside_p (n2, n1))
+    return 1;
+  /* If allocnos are equally good, sort by allocno numbers, so that
+     the results of qsort leave nothing to chance.  We put allocnos
+     with higher number first in the list because it is the original
+     order for allocnos from loops on the same levels.  */
+  return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
+}
+
+/* This array is used to sort allocnos to restore allocno order in
+   the regno allocno list.  */
+static ira_allocno_t *regno_allocnos;
+
+/* Restore allocno order for REGNO in the regno allocno list.  */
+static void
+ira_rebuild_regno_allocno_list (int regno)
+{
+  int i, n;
+  ira_allocno_t a;
+
+  for (n = 0, a = ira_regno_allocno_map[regno];
+       a != NULL;
+       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
+    regno_allocnos[n++] = a;
+  ira_assert (n > 0);
+  qsort (regno_allocnos, n, sizeof (ira_allocno_t),
+         regno_allocno_order_compare_func);
+  for (i = 1; i < n; i++)
+    ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
+  ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
+  ira_regno_allocno_map[regno] = regno_allocnos[0];
+  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
+    fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
+}
+
 /* Remove allocnos from loops removed from the allocation
    consideration.  */
 static void
 remove_unnecessary_allocnos (void)
 {
   int regno;
-  bool merged_p;
+  bool merged_p, rebuild_p;
   enum reg_class cover_class;
   ira_allocno_t a, prev_a, next_a, parent_a;
   ira_loop_tree_node_t a_node, parent;
   allocno_live_range_t r;
 
   merged_p = false;
+  regno_allocnos = NULL;
   for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
-    for (prev_a = NULL, a = ira_regno_allocno_map[regno];
-         a != NULL;
-         a = next_a)
-      {
-        next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
-        a_node = ALLOCNO_LOOP_TREE_NODE (a);
-        if (! a_node->to_remove_p)
-          prev_a = a;
-        else
-          {
-            for (parent = a_node->parent;
-                 (parent_a = parent->regno_allocno_map[regno]) == NULL
-                   && parent->to_remove_p;
-                 parent = parent->parent)
-              ;
-            if (parent_a == NULL)
-              {
+    {
+      rebuild_p = false;
+      for (prev_a = NULL, a = ira_regno_allocno_map[regno];
+           a != NULL;
+           a = next_a)
+        {
+          next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
+          a_node = ALLOCNO_LOOP_TREE_NODE (a);
+          if (! a_node->to_remove_p)
+            prev_a = a;
+          else
+            {
+              for (parent = a_node->parent;
+                   (parent_a = parent->regno_allocno_map[regno]) == NULL
+                     && parent->to_remove_p;
+                   parent = parent->parent)
+                ;
+              if (parent_a == NULL)
+                {
                   /* There are no allocnos with the same regno in upper
                      region -- just move the allocno to the upper
                      region.  */
-                prev_a = a;
-                ALLOCNO_LOOP_TREE_NODE (a) = parent;
-                parent->regno_allocno_map[regno] = a;
-                bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
-              }
-            else
-              {
-                /* Remove the allocno and update info of allocno in
-                   the upper region.  */
-                if (prev_a == NULL)
-                  ira_regno_allocno_map[regno] = next_a;
-                else
-                  ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
-                r = ALLOCNO_LIVE_RANGES (a);
-                change_allocno_in_range_list (r, parent_a);
-                ALLOCNO_LIVE_RANGES (parent_a)
-                  = ira_merge_allocno_live_ranges
+                  prev_a = a;
+                  ALLOCNO_LOOP_TREE_NODE (a) = parent;
+                  parent->regno_allocno_map[regno] = a;
+                  bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
+                  rebuild_p = true;
+                }
+              else
+                {
+                  /* Remove the allocno and update info of allocno in
+                     the upper region.  */
+                  if (prev_a == NULL)
+                    ira_regno_allocno_map[regno] = next_a;
+                  else
+                    ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
+                  r = ALLOCNO_LIVE_RANGES (a);
+                  change_allocno_in_range_list (r, parent_a);
+                  ALLOCNO_LIVE_RANGES (parent_a)
+                    = ira_merge_allocno_live_ranges
                       (r, ALLOCNO_LIVE_RANGES (parent_a));
-                merged_p = true;
-                ALLOCNO_LIVE_RANGES (a) = NULL;
-                IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (parent_a),
-                                  ALLOCNO_CONFLICT_HARD_REGS (a));
+                  merged_p = true;
+                  ALLOCNO_LIVE_RANGES (a) = NULL;
+                  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (parent_a),
+                                    ALLOCNO_CONFLICT_HARD_REGS (a));
 #ifdef STACK_REGS
-                if (ALLOCNO_NO_STACK_REG_P (a))
-                  ALLOCNO_NO_STACK_REG_P (parent_a) = true;
+                  if (ALLOCNO_NO_STACK_REG_P (a))
+                    ALLOCNO_NO_STACK_REG_P (parent_a) = true;
 #endif
-                ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
-                ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
-                ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
-                IOR_HARD_REG_SET
-                  (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
-                   ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
-                ALLOCNO_CALLS_CROSSED_NUM (parent_a)
-                  += ALLOCNO_CALLS_CROSSED_NUM (a);
-                ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
-                  += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
-                if (! ALLOCNO_BAD_SPILL_P (a))
-                  ALLOCNO_BAD_SPILL_P (parent_a) = false;
+                  ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
+                  ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
+                  ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
+                  IOR_HARD_REG_SET
+                    (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
+                     ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
+                  ALLOCNO_CALLS_CROSSED_NUM (parent_a)
+                    += ALLOCNO_CALLS_CROSSED_NUM (a);
+                  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
+                    += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
+                  if (! ALLOCNO_BAD_SPILL_P (a))
+                    ALLOCNO_BAD_SPILL_P (parent_a) = false;
 #ifdef STACK_REGS
-                if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
-                  ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
+                  if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
+                    ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
 #endif
-                cover_class = ALLOCNO_COVER_CLASS (a);
-                ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
-                ira_allocate_and_accumulate_costs
-                  (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
-                   ALLOCNO_HARD_REG_COSTS (a));
-                ira_allocate_and_accumulate_costs
-                  (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
-                   cover_class,
-                   ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
-                ALLOCNO_COVER_CLASS_COST (parent_a)
-                  += ALLOCNO_COVER_CLASS_COST (a);
-                ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
-                finish_allocno (a);
-              }
-          }
-      }
+                  cover_class = ALLOCNO_COVER_CLASS (a);
+                  ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
+                  ira_allocate_and_accumulate_costs
+                    (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
+                     ALLOCNO_HARD_REG_COSTS (a));
+                  ira_allocate_and_accumulate_costs
+                    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
+                     cover_class,
+                     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
+                  ALLOCNO_COVER_CLASS_COST (parent_a)
+                    += ALLOCNO_COVER_CLASS_COST (a);
+                  ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
+                  finish_allocno (a);
+                }
+            }
+        }
+      if (rebuild_p)
+        /* We need to restore the order in regno allocno list.  */
+        {
+          if (regno_allocnos == NULL)
+            regno_allocnos
+              = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
+                                                * ira_allocnos_num);
+          ira_rebuild_regno_allocno_list (regno);
+        }
+    }
   if (merged_p)
     ira_rebuild_start_finish_chains ();
+  if (regno_allocnos != NULL)
+    ira_free (regno_allocnos);
 }
 
 /* Remove loops from consideration.  We remove loops for which a