Commit 25910ca4 by Kazu Hirata

cse.c (fold_rtx_subreg, [...]): New.

	* cse.c (fold_rtx_subreg, fold_rtx_mem): New.
	(fold_rtx): Call fold_rtx_subreg and fold_rtx_mem to handle
	SUBREG and MEM, respectively.

From-SVN: r95964
parent 71047303
2005-03-06  Kazu Hirata  <kazu@cs.umass.edu>

	* cse.c (fold_rtx_subreg, fold_rtx_mem): New.
	(fold_rtx): Call fold_rtx_subreg and fold_rtx_mem to handle
	SUBREG and MEM, respectively.

2005-03-06  Kazu Hirata  <kazu@cs.umass.edu>

	* fold-const.c (fold_binary): Avoid directly using the original
	expression t as much as possible.
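In outline, the change keeps fold_rtx's dispatch switch and moves the bodies of its SUBREG and MEM cases into the two new static helpers named in the ChangeLog. A minimal sketch of the resulting shape (illustration only, not the committed code; the real definitions follow in the diff below):

/* Sketch of the new dispatch: the SUBREG and MEM arms of fold_rtx now
   just delegate to the extracted helpers; everything else is unchanged.  */
static rtx fold_rtx_subreg (rtx x, rtx insn);
static rtx fold_rtx_mem (rtx x, rtx insn);

static rtx
fold_rtx (rtx x, rtx insn)
{
  switch (GET_CODE (x))
    {
    case SUBREG:
      return fold_rtx_subreg (x, insn);
    case MEM:
      return fold_rtx_mem (x, insn);
    default:
      break;
    }
  /* ... remaining folding logic elided ... */
  return x;
}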
@@ -3210,81 +3210,26 @@ find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
  return code;
}

/* Fold SUBREG.  */

static rtx
fold_rtx_subreg (rtx x, rtx insn)
{
  enum machine_mode mode = GET_MODE (x);
  rtx folded_arg0;
  rtx const_arg0;
  rtx new;

  /* See if we previously assigned a constant value to this SUBREG.  */
  if ((new = lookup_as_function (x, CONST_INT)) != 0
      || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
    return new;

  /* If this is a paradoxical SUBREG, we have no idea what value the
     extra bits would have.  However, if the operand is equivalent to
     a SUBREG whose operand is the same as our mode, and all the modes
     are within a word, we can just use the inner operand because
     these SUBREGs just say how to treat the register.

     Similarly if we find an integer constant.  */
@@ -3312,9 +3257,9 @@ fold_rtx (rtx x, rtx insn)
      return x;
    }

  /* Fold SUBREG_REG.  If it changed, see if we can simplify the
     SUBREG.  We might be able to if the SUBREG is extracting a single
     word in an integral mode or extracting the low part.  */
  folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
  const_arg0 = equiv_constant (folded_arg0);
@@ -3344,18 +3289,19 @@ fold_rtx (rtx x, rtx insn)
  if (subreg_lowpart_p (x))
    /* If this is a narrowing SUBREG and our operand is a REG, see
       if we can find an equivalence for REG that is an arithmetic
       operation in a wider mode where both operands are
       paradoxical SUBREGs from objects of our result mode.  In
       that case, we couldn't report an equivalent value for that
       operation, since we don't know what the extra bits will be.
       But we can find an equivalence for this SUBREG by folding
       that operation in the narrow mode.  This allows us to fold
       arithmetic in narrow modes when the machine only supports
       word-sized arithmetic.

       Also look for a case where we have a SUBREG whose operand
       is the same as our result.  If both modes are smaller than
       a word, we are simply interpreting a register in different
       modes and we can use the inner value.  */
    for (; elt; elt = elt->next_same_value)
      {
@@ -3410,9 +3356,10 @@ fold_rtx (rtx x, rtx insn)
            op1 = equiv_constant (op1);

            /* If we are looking for the low SImode part of
               (ashift:DI c (const_int 32)), it doesn't work to
               compute that in SImode, because a 32-bit shift in
               SImode is unpredictable.  We know the value is
               0.  */
            if (op0 && op1
                && GET_CODE (elt->exp) == ASHIFT
                && GET_CODE (op1) == CONST_INT
@@ -3421,9 +3368,8 @@ fold_rtx (rtx x, rtx insn)
                if (INTVAL (op1)
                    < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
                  /* If the count fits in the inner mode's width,
                     but exceeds the outer mode's width, the value
                     will get truncated to 0 by the subreg.  */
                  new = CONST0_RTX (mode);
                else
                  /* If the count exceeds even the inner mode's width,
@@ -3431,7 +3377,8 @@ fold_rtx (rtx x, rtx insn)
                  new = 0;
              }
            else if (op0 && op1)
              new = simplify_binary_operation (GET_CODE (elt->exp),
                                               mode, op0, op1);
          }
        else if (GET_CODE (elt->exp) == SUBREG
@@ -3445,10 +3392,11 @@ fold_rtx (rtx x, rtx insn)
          return new;
      }
  else
    /* A SUBREG resulting from a zero extension may fold to zero
       if it extracts higher bits than the ZERO_EXTEND's source
       bits.  FIXME: if combine tried to, er, combine these
       instructions, this transformation may be moved to
       simplify_subreg.  */
    for (; elt; elt = elt->next_same_value)
      {
        if (GET_CODE (elt->exp) == ZERO_EXTEND
@@ -3459,26 +3407,26 @@ fold_rtx (rtx x, rtx insn)
      }

  return x;
}

/* Fold MEM.  */

static rtx
fold_rtx_mem (rtx x, rtx insn)
{
  enum machine_mode mode = GET_MODE (x);
  rtx new;

  /* If we are not actually processing an insn, don't try to find the
     best address.  Not only don't we care, but we could modify the
     MEM in an invalid way since we have no insn to validate
     against.  */
  if (insn != 0)
    find_best_addr (insn, &XEXP (x, 0), mode);

  {
    /* Even if we don't fold in the insn itself, we can safely do so
       here, in hopes of getting a constant.  */
    rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
    rtx base = 0;
    HOST_WIDE_INT offset = 0;
@@ -3494,7 +3442,8 @@ fold_rtx (rtx x, rtx insn)
            addr = addr_ent->const_rtx;
        }

    /* If address is constant, split it into a base and integer
       offset.  */
    if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
      base = addr;
    else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
@@ -3522,15 +3471,16 @@ fold_rtx (rtx x, rtx insn)
            constant_pool_entries_regcost = approx_reg_cost (constant);
          }

        /* If we are loading the full constant, we have an
           equivalence.  */
        if (offset == 0 && mode == const_mode)
          return constant;

        /* If this actually isn't a constant (weird!), we can't do
           anything.  Otherwise, handle the two most common cases:
           extracting a word from a multi-word constant, and
           extracting the low-order bits.  Other cases don't seem
           common enough to worry about.  */
        if (! CONSTANT_P (constant))
          return x;
@@ -3583,10 +3533,10 @@ fold_rtx (rtx x, rtx insn)
            if (GET_MODE (table) != Pmode)
              new = gen_rtx_TRUNCATE (GET_MODE (table), new);

            /* Indicate this is a constant.  This isn't a valid
               form of CONST, but it will only be used to fold the
               next insns and then discarded, so it should be
               safe.

               Note this expression must be explicitly discarded,
               by cse_insn, else it may end up in a REG_EQUAL note
@@ -3598,6 +3548,86 @@ fold_rtx (rtx x, rtx insn)
    return x;
  }
}

/* If X is a nontrivial arithmetic operation on an argument
   for which a constant value can be determined, return
   the result of operating on that value, as a constant.
   Otherwise, return X, possibly with one or more operands
   modified by recursive calls to this function.

   If X is a register whose contents are known, we do NOT
   return those contents here.  equiv_constant is called to
   perform that task.

   INSN is the insn that we may be modifying.  If it is 0, make a copy
   of X before modifying it.  */

static rtx
fold_rtx (rtx x, rtx insn)
{
  enum rtx_code code;
  enum machine_mode mode;
  const char *fmt;
  int i;
  rtx new = 0;
  int copied = 0;
  int must_swap = 0;

  /* Folded equivalents of first two operands of X.  */
  rtx folded_arg0;
  rtx folded_arg1;

  /* Constant equivalents of first three operands of X;
     0 when no such equivalent is known.  */
  rtx const_arg0;
  rtx const_arg1;
  rtx const_arg2;

  /* The mode of the first operand of X.  We need this for sign and zero
     extends.  */
  enum machine_mode mode_arg0;

  if (x == 0)
    return x;

  mode = GET_MODE (x);
  code = GET_CODE (x);
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
    case PC:
      /* No use simplifying an EXPR_LIST
         since they are used only for lists of args
         in a function call's REG_EQUAL note.  */
    case EXPR_LIST:
      return x;

#ifdef HAVE_cc0
    case CC0:
      return prev_insn_cc0;
#endif

    case SUBREG:
      return fold_rtx_subreg (x, insn);

    case NOT:
    case NEG:
      /* If we have (NOT Y), see if Y is known to be (NOT Z).
         If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
      new = lookup_as_function (XEXP (x, 0), code);
      if (new)
        return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
      break;

    case MEM:
      return fold_rtx_mem (x, insn);

#ifdef NO_FUNCTION_CSE
    case CALL: