Commit 5bb51e1d authored by Eric Botcazou, committed by Eric Botcazou

re PR rtl-optimization/27616 (Infinite loop at -O1 and above in RTL CSE)

	PR rtl-optimization/27616
	* cse.c (table_size): New static variable.
	(new_basic_block): Initialize it to 0.
	(remove_from_table): Decrement it.
	(insert): Increment it.
	(fold_rtx_mem_1): New function, renamed from fold_rtx_mem.
	(fold_rtx_mem): Enforce a cap on the recursion depth.  Call
	fold_rtx_mem_1 if under the cap.
	(fold_rtx) <RTX_COMM_ARITH>: In the associative case, delay a little
	the lookup of the equivalent expression and test for equality of the
	first operand of the equivalent expression before in turn looking up
	an equivalent constant for the second operand.

From-SVN: r116683
parent afe08db5
2006-09-04  Eric Botcazou  <ebotcazou@libertysurf.fr>

        PR rtl-optimization/27616
        * cse.c (table_size): New static variable.
        (new_basic_block): Initialize it to 0.
        (remove_from_table): Decrement it.
        (insert): Increment it.
        (fold_rtx_mem_1): New function, renamed from fold_rtx_mem.
        (fold_rtx_mem): Enforce a cap on the recursion depth.  Call
        fold_rtx_mem_1 if under the cap.
        (fold_rtx) <RTX_COMM_ARITH>: In the associative case, delay a little
        the lookup of the equivalent expression and test for equality of the
        first operand of the equivalent expression before in turn looking up
        an equivalent constant for the second operand.

2006-09-02  Geoffrey Keating  <geoffk@apple.com>

        Revert this change:
@@ -528,6 +528,10 @@ struct table_elt
 static struct table_elt *table[HASH_SIZE];
 
+/* Number of elements in the hash table.  */
+
+static unsigned int table_size;
+
 /* Chain of `struct table_elt's made so far for this function
    but currently removed from the table.  */
@@ -961,6 +965,8 @@ new_basic_block (void)
         }
     }
 
+  table_size = 0;
+
 #ifdef HAVE_cc0
   prev_insn = 0;
   prev_insn_cc0 = 0;
@@ -1371,6 +1377,8 @@ remove_from_table (struct table_elt *elt, unsigned int hash)
   /* Now add it to the free element chain.  */
   elt->next_same_hash = free_element_chain;
   free_element_chain = elt;
+
+  table_size--;
 }
 
 /* Look up X in the hash table and return its table element,
@@ -1648,6 +1656,8 @@ insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mo
         }
     }
 
+  table_size++;
+
   return elt;
 }
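Taken together, the three hunks above keep table_size equal to the number of live entries in the expression hash table: it is reset when the table is emptied at the start of a basic block, incremented on every insertion and decremented on every removal. The following standalone toy sketches the same counting pattern; elt, new_block, insert_elt and remove_elt are made-up stand-ins, not the real cse.c structures or functions.

/* Sketch of the counting pattern added by the patch: a static element
   count kept in sync with every insertion and removal, and reset
   whenever the table is emptied.  Toy code, not cse.c.  */
#include <stdio.h>
#include <stdlib.h>

#define HASH_SIZE 32

struct elt { int key; struct elt *next; };

static struct elt *table[HASH_SIZE];

/* Number of elements currently in the hash table.  */
static unsigned int table_size;

/* Empty the table, as new_basic_block does, and reset the count.  */
static void
new_block (void)
{
  for (int i = 0; i < HASH_SIZE; i++)
    {
      struct elt *e = table[i];
      while (e)
        {
          struct elt *next = e->next;
          free (e);
          e = next;
        }
      table[i] = NULL;
    }
  table_size = 0;
}

/* Insert KEY and bump the count, as insert does.  */
static void
insert_elt (int key)
{
  struct elt *e = malloc (sizeof *e);
  e->key = key;
  e->next = table[key % HASH_SIZE];
  table[key % HASH_SIZE] = e;
  table_size++;
}

/* Remove KEY and decrement the count, as remove_from_table does.  */
static void
remove_elt (int key)
{
  for (struct elt **p = &table[key % HASH_SIZE]; *p; p = &(*p)->next)
    if ((*p)->key == key)
      {
        struct elt *dead = *p;
        *p = dead->next;
        free (dead);
        table_size--;
        return;
      }
}

int
main (void)
{
  new_block ();
  insert_elt (1);
  insert_elt (2);
  remove_elt (1);
  printf ("table_size = %u\n", table_size);   /* prints 1 */
  return 0;
}

cse.c itself recycles removed elements on a free chain rather than freeing them, but the invariant maintained on the counter is the same, and it is this count of recorded expressions that fold_rtx_mem below uses as its recursion bound.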
@@ -3432,10 +3442,10 @@ fold_rtx_subreg (rtx x, rtx insn)
   return x;
 }
 
-/* Fold MEM.  */
+/* Fold MEM.  Not to be called directly, see fold_rtx_mem instead.  */
 
 static rtx
-fold_rtx_mem (rtx x, rtx insn)
+fold_rtx_mem_1 (rtx x, rtx insn)
 {
   enum machine_mode mode = GET_MODE (x);
   rtx new;
@@ -3598,6 +3608,51 @@ fold_rtx_mem (rtx x, rtx insn)
     }
 }
 
+/* Fold MEM.  */
+
+static rtx
+fold_rtx_mem (rtx x, rtx insn)
+{
+  /* To avoid infinite oscillations between fold_rtx and fold_rtx_mem,
+     refuse to allow recursion of the latter past n levels.  This can
+     happen because fold_rtx_mem will try to fold the address of the
+     memory reference it is passed, i.e. conceptually throwing away
+     the MEM and reinjecting the bare address into fold_rtx.  As a
+     result, patterns like
+
+       set (reg1)
+           (plus (reg)
+                 (mem (plus (reg2) (const_int))))
+
+       set (reg2)
+           (plus (reg)
+                 (mem (plus (reg1) (const_int))))
+
+     will defeat any "first-order" short-circuit put in either
+     function to prevent these infinite oscillations.
+
+     The heuristics for determining n is as follows: since each time
+     it is invoked fold_rtx_mem throws away a MEM, and since MEMs
+     are generically not nested, we assume that each invocation of
+     fold_rtx_mem corresponds to a new "top-level" operand, i.e.
+     the source or the destination of a SET.  So fold_rtx_mem is
+     bound to stop or cycle before n recursions, n being the number
+     of expressions recorded in the hash table.  We also leave some
+     play to account for the initial steps.  */
+
+  static unsigned int depth;
+  rtx ret;
+
+  if (depth > 3 + table_size)
+    return x;
+
+  depth++;
+  ret = fold_rtx_mem_1 (x, insn);
+  depth--;
+
+  return ret;
+}
+
 /* If X is a nontrivial arithmetic operation on an argument
    for which a constant value can be determined, return
    the result of operating on that value, as a constant.
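To make the shape of the fix easy to see in isolation, here is a small self-contained sketch of the same static-depth-counter pattern used by the fold_rtx_mem wrapper above. The names fold_expr and fold_mem and the fixed limit of 8 are made up for illustration; in cse.c the two functions are fold_rtx and fold_rtx_mem and the limit is 3 + table_size.

/* Sketch only, not GCC code: two mutually recursive folders bounded
   by a static depth counter so they cannot oscillate forever.  */
#include <stdio.h>

static unsigned int depth;
static const unsigned int limit = 8;   /* stands in for 3 + table_size */

static int fold_mem (int x);

/* Stands in for fold_rtx: folding an expression may dispatch to the
   MEM folder.  */
static int
fold_expr (int x)
{
  return fold_mem (x);
}

/* Stands in for fold_rtx_mem: strip the MEM and refold its address,
   but give up and return the expression unchanged once the depth cap
   is exceeded.  */
static int
fold_mem (int x)
{
  int ret;

  if (depth > limit)
    return x;

  depth++;
  ret = fold_expr (x);   /* may come straight back into fold_mem */
  depth--;

  return ret;
}

int
main (void)
{
  /* Without the cap this call would recurse without bound; with it,
     the recursion unwinds once the limit is hit and 42 is printed.  */
  printf ("%d\n", fold_mem (42));
  return 0;
}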
@@ -4262,10 +4317,8 @@ fold_rtx (rtx x, rtx insn)
        {
          int is_shift
            = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
-         rtx y = lookup_as_function (folded_arg0, code);
-         rtx inner_const;
+         rtx y, inner_const, new_const;
          enum rtx_code associate_code;
-         rtx new_const;
 
          if (is_shift
              && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
......@@ -4278,11 +4331,9 @@ fold_rtx (rtx x, rtx insn)
break;
}
y = lookup_as_function (folded_arg0, code);
if (y == 0)
break;
inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
if (!inner_const || GET_CODE (inner_const) != CONST_INT)
break;
/* If we have compiled a statement like
"if (x == (x & mask1))", and now are looking at
@@ -4292,6 +4343,10 @@ fold_rtx (rtx x, rtx insn)
          if (XEXP (y, 0) == folded_arg0)
            break;
 
+         inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
+         if (!inner_const || GET_CODE (inner_const) != CONST_INT)
+           break;
+
          /* Don't associate these operations if they are a PLUS with the
             same constant and it is a power of two.  These might be doable
             with a pre- or post-increment.  Similarly for two subtracts of
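Reassembled from the three fold_rtx hunks above (GCC-internal code shown only as a reading aid, not a standalone example), the RTX_COMM_ARITH sequence after the patch reads as follows: the cheap self-reference test on the first operand of the equivalent expression now runs before the equiv_constant (fold_rtx (...)) lookup for the second operand, so the existing short-circuit is reached without another round trip through fold_rtx.

y = lookup_as_function (folded_arg0, code);
if (y == 0)
  break;

/* Cheap test first: if the first operand of Y is our own folded_arg0,
   an infinite loop would otherwise result (see the comment in the
   source above).  */
if (XEXP (y, 0) == folded_arg0)
  break;

/* Only then fold the second operand and look for an equivalent
   constant.  */
inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
if (!inner_const || GET_CODE (inner_const) != CONST_INT)
  break;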
2006-09-04  Eric Botcazou  <ebotcazou@libertysurf.fr>

        * gcc.c-torture/compile/20060904-1.c: New test.

2006-09-04  Nathan Sidwell  <nathan@codesourcery.com>

        PR c++/23287 Revert my 2006-09-01 patch
/* PR rtl-optimization/27616 */
/* Reported by Lee Ji Hwan <moonz@kaist.ac.kr> */
/* Testcase by Andrew Pinski <pinskia@gcc.gnu.org> */
struct chunk_s
{
  unsigned int size;
  int offset_next;
};

typedef struct chunk_s chunk_t;

void foo(chunk_t *first)
{
  chunk_t *cur;
  char *first0;

  do {
    first0 = (char *) first;
    cur = (chunk_t *) (first0 + first->offset_next);
    if ((chunk_t *) (first0 + cur->offset_next) != first)
      return ;
    first->offset_next = 0;
  } while (cur->size != 0);
}