Commit 0b73246f by Richard Sandiford; committed by Richard Sandiford

[41/77] Split scalar integer handling out of force_to_mode

force_to_mode exits partway through for modes that aren't scalar
integers.  This patch splits the remainder of the function out
into a subroutine, force_int_to_mode, so that the modes from that
point on can have type scalar_int_mode.

The patch also makes sure that xmode is kept up-to-date with x
and uses xmode instead of GET_MODE (x) throughout.

2017-08-30  Richard Sandiford  <richard.sandiford@linaro.org>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

gcc/
	* combine.c (force_int_to_mode): New function, split out from...
	(force_to_mode): ...here.  Keep xmode up-to-date and use it
	instead of GET_MODE (x).

Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>

From-SVN: r251493
parent 5602f58c
...@@ -2,6 +2,14 @@ ...@@ -2,6 +2,14 @@
Alan Hayward <alan.hayward@arm.com> Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com> David Sherwood <david.sherwood@arm.com>
* combine.c (force_int_to_mode): New function, split out from...
(force_to_mode): ...here. Keep xmode up-to-date and use it
instead of GET_MODE (x).
2017-08-30 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
* optabs-query.h (extraction_insn::struct_mode): Change type to * optabs-query.h (extraction_insn::struct_mode): Change type to
opt_scalar_int_mode and update comment. opt_scalar_int_mode and update comment.
(extraction_insn::field_mode): Change type to scalar_int_mode. (extraction_insn::field_mode): Change type to scalar_int_mode.
......
...@@ -449,6 +449,8 @@ static rtx extract_left_shift (rtx, int); ...@@ -449,6 +449,8 @@ static rtx extract_left_shift (rtx, int);
static int get_pos_from_mask (unsigned HOST_WIDE_INT, static int get_pos_from_mask (unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT *); unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx); static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_int_to_mode (rtx, scalar_int_mode, scalar_int_mode,
scalar_int_mode, unsigned HOST_WIDE_INT, int);
static rtx force_to_mode (rtx, machine_mode, static rtx force_to_mode (rtx, machine_mode,
unsigned HOST_WIDE_INT, int); unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *); static rtx if_then_else_cond (rtx, rtx *, rtx *);
...@@ -8495,8 +8497,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8495,8 +8497,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
enum rtx_code code = GET_CODE (x); enum rtx_code code = GET_CODE (x);
int next_select = just_select || code == XOR || code == NOT || code == NEG; int next_select = just_select || code == XOR || code == NOT || code == NEG;
machine_mode op_mode; machine_mode op_mode;
unsigned HOST_WIDE_INT fuller_mask, nonzero; unsigned HOST_WIDE_INT nonzero;
rtx op0, op1, temp;
/* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
code below will do the wrong thing since the mode of such an code below will do the wrong thing since the mode of such an
...@@ -8524,15 +8525,6 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8524,15 +8525,6 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
if (op_mode) if (op_mode)
mask &= GET_MODE_MASK (op_mode); mask &= GET_MODE_MASK (op_mode);
/* When we have an arithmetic operation, or a shift whose count we
do not know, we need to assume that all bits up to the highest-order
bit in MASK will be needed. This is how we form such a mask. */
if (mask & (HOST_WIDE_INT_1U << (HOST_BITS_PER_WIDE_INT - 1)))
fuller_mask = HOST_WIDE_INT_M1U;
else
fuller_mask = ((HOST_WIDE_INT_1U << (floor_log2 (mask) + 1))
- 1);
/* Determine what bits of X are guaranteed to be (non)zero. */ /* Determine what bits of X are guaranteed to be (non)zero. */
nonzero = nonzero_bits (x, mode); nonzero = nonzero_bits (x, mode);
...@@ -8570,9 +8562,42 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8570,9 +8562,42 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))))))) & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
return force_to_mode (SUBREG_REG (x), mode, mask, next_select); return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
/* The arithmetic simplifications here only work for scalar integer modes. */ scalar_int_mode int_mode, xmode;
if (!SCALAR_INT_MODE_P (mode) || !SCALAR_INT_MODE_P (GET_MODE (x))) if (is_a <scalar_int_mode> (mode, &int_mode)
&& is_a <scalar_int_mode> (GET_MODE (x), &xmode))
/* OP_MODE is either MODE or XMODE, so it must be a scalar
integer too. */
return force_int_to_mode (x, int_mode, xmode,
as_a <scalar_int_mode> (op_mode),
mask, just_select);
return gen_lowpart_or_truncate (mode, x); return gen_lowpart_or_truncate (mode, x);
}
/* Subroutine of force_to_mode that handles cases in which both X and
the result are scalar integers. MODE is the mode of the result,
XMODE is the mode of X, and OP_MODE says which of MODE or XMODE
is preferred for simplified versions of X. The other arguments
are as for force_to_mode. */
static rtx
force_int_to_mode (rtx x, scalar_int_mode mode, scalar_int_mode xmode,
scalar_int_mode op_mode, unsigned HOST_WIDE_INT mask,
int just_select)
{
enum rtx_code code = GET_CODE (x);
int next_select = just_select || code == XOR || code == NOT || code == NEG;
unsigned HOST_WIDE_INT fuller_mask;
rtx op0, op1, temp;
/* When we have an arithmetic operation, or a shift whose count we
do not know, we need to assume that all bits up to the highest-order
bit in MASK will be needed. This is how we form such a mask. */
if (mask & (HOST_WIDE_INT_1U << (HOST_BITS_PER_WIDE_INT - 1)))
fuller_mask = HOST_WIDE_INT_M1U;
else
fuller_mask = ((HOST_WIDE_INT_1U << (floor_log2 (mask) + 1))
- 1);
switch (code) switch (code)
{ {
...@@ -8603,14 +8628,14 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8603,14 +8628,14 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
{ {
x = simplify_and_const_int (x, op_mode, XEXP (x, 0), x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
mask & INTVAL (XEXP (x, 1))); mask & INTVAL (XEXP (x, 1)));
xmode = op_mode;
/* If X is still an AND, see if it is an AND with a mask that /* If X is still an AND, see if it is an AND with a mask that
is just some low-order bits. If so, and it is MASK, we don't is just some low-order bits. If so, and it is MASK, we don't
need it. */ need it. */
if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)) if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
&& ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x))) && (INTVAL (XEXP (x, 1)) & GET_MODE_MASK (xmode)) == mask)
== mask))
x = XEXP (x, 0); x = XEXP (x, 0);
/* If it remains an AND, try making another AND with the bits /* If it remains an AND, try making another AND with the bits
...@@ -8619,18 +8644,17 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8619,18 +8644,17 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
cheaper constant. */ cheaper constant. */
if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)) if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
&& GET_MODE_MASK (GET_MODE (x)) != mask && GET_MODE_MASK (xmode) != mask
&& HWI_COMPUTABLE_MODE_P (GET_MODE (x))) && HWI_COMPUTABLE_MODE_P (xmode))
{ {
unsigned HOST_WIDE_INT cval unsigned HOST_WIDE_INT cval
= UINTVAL (XEXP (x, 1)) = UINTVAL (XEXP (x, 1)) | (GET_MODE_MASK (xmode) & ~mask);
| (GET_MODE_MASK (GET_MODE (x)) & ~mask);
rtx y; rtx y;
y = simplify_gen_binary (AND, GET_MODE (x), XEXP (x, 0), y = simplify_gen_binary (AND, xmode, XEXP (x, 0),
gen_int_mode (cval, GET_MODE (x))); gen_int_mode (cval, xmode));
if (set_src_cost (y, GET_MODE (x), optimize_this_for_speed_p) if (set_src_cost (y, xmode, optimize_this_for_speed_p)
< set_src_cost (x, GET_MODE (x), optimize_this_for_speed_p)) < set_src_cost (x, xmode, optimize_this_for_speed_p))
x = y; x = y;
} }
...@@ -8660,7 +8684,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8660,7 +8684,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
&& pow2p_hwi (- smask) && pow2p_hwi (- smask)
&& (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
&& (INTVAL (XEXP (x, 1)) & ~smask) != 0) && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
return force_to_mode (plus_constant (GET_MODE (x), XEXP (x, 0), return force_to_mode (plus_constant (xmode, XEXP (x, 0),
(INTVAL (XEXP (x, 1)) & smask)), (INTVAL (XEXP (x, 1)) & smask)),
mode, smask, next_select); mode, smask, next_select);
} }
...@@ -8691,8 +8715,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8691,8 +8715,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
if (CONST_INT_P (XEXP (x, 0)) if (CONST_INT_P (XEXP (x, 0))
&& least_bit_hwi (UINTVAL (XEXP (x, 0))) > mask) && least_bit_hwi (UINTVAL (XEXP (x, 0))) > mask)
{ {
x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1), x = simplify_gen_unary (NEG, xmode, XEXP (x, 1), xmode);
GET_MODE (x));
return force_to_mode (x, mode, mask, next_select); return force_to_mode (x, mode, mask, next_select);
} }
...@@ -8701,8 +8724,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8701,8 +8724,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
if (CONST_INT_P (XEXP (x, 0)) if (CONST_INT_P (XEXP (x, 0))
&& ((UINTVAL (XEXP (x, 0)) | fuller_mask) == UINTVAL (XEXP (x, 0)))) && ((UINTVAL (XEXP (x, 0)) | fuller_mask) == UINTVAL (XEXP (x, 0))))
{ {
x = simplify_gen_unary (NOT, GET_MODE (x), x = simplify_gen_unary (NOT, xmode, XEXP (x, 1), xmode);
XEXP (x, 1), GET_MODE (x));
return force_to_mode (x, mode, mask, next_select); return force_to_mode (x, mode, mask, next_select);
} }
...@@ -8723,16 +8745,16 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8723,16 +8745,16 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
&& CONST_INT_P (XEXP (x, 1)) && CONST_INT_P (XEXP (x, 1))
&& ((INTVAL (XEXP (XEXP (x, 0), 1)) && ((INTVAL (XEXP (XEXP (x, 0), 1))
+ floor_log2 (INTVAL (XEXP (x, 1)))) + floor_log2 (INTVAL (XEXP (x, 1))))
< GET_MODE_PRECISION (GET_MODE (x))) < GET_MODE_PRECISION (xmode))
&& (UINTVAL (XEXP (x, 1)) && (UINTVAL (XEXP (x, 1))
& ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) & ~nonzero_bits (XEXP (x, 0), xmode)) == 0)
{ {
temp = gen_int_mode ((INTVAL (XEXP (x, 1)) & mask) temp = gen_int_mode ((INTVAL (XEXP (x, 1)) & mask)
<< INTVAL (XEXP (XEXP (x, 0), 1)), << INTVAL (XEXP (XEXP (x, 0), 1)),
GET_MODE (x)); xmode);
temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x), temp = simplify_gen_binary (GET_CODE (x), xmode,
XEXP (XEXP (x, 0), 0), temp); XEXP (XEXP (x, 0), 0), temp);
x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp, x = simplify_gen_binary (LSHIFTRT, xmode, temp,
XEXP (XEXP (x, 0), 1)); XEXP (XEXP (x, 0), 1));
return force_to_mode (x, mode, mask, next_select); return force_to_mode (x, mode, mask, next_select);
} }
...@@ -8756,8 +8778,11 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8756,8 +8778,11 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
op0 = gen_lowpart_or_truncate (op_mode, op0); op0 = gen_lowpart_or_truncate (op_mode, op0);
op1 = gen_lowpart_or_truncate (op_mode, op1); op1 = gen_lowpart_or_truncate (op_mode, op1);
if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1)) if (op_mode != xmode || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
{
x = simplify_gen_binary (code, op_mode, op0, op1); x = simplify_gen_binary (code, op_mode, op0, op1);
xmode = op_mode;
}
break; break;
case ASHIFT: case ASHIFT:
...@@ -8790,8 +8815,11 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8790,8 +8815,11 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
force_to_mode (XEXP (x, 0), op_mode, force_to_mode (XEXP (x, 0), op_mode,
mask, next_select)); mask, next_select));
if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) if (op_mode != xmode || op0 != XEXP (x, 0))
{
x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1)); x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
xmode = op_mode;
}
break; break;
case LSHIFTRT: case LSHIFTRT:
...@@ -8813,13 +8841,16 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8813,13 +8841,16 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
/* We can only change the mode of the shift if we can do arithmetic /* We can only change the mode of the shift if we can do arithmetic
in the mode of the shift and INNER_MASK is no wider than the in the mode of the shift and INNER_MASK is no wider than the
width of X's mode. */ width of X's mode. */
if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0) if ((inner_mask & ~GET_MODE_MASK (xmode)) != 0)
op_mode = GET_MODE (x); op_mode = xmode;
inner = force_to_mode (inner, op_mode, inner_mask, next_select); inner = force_to_mode (inner, op_mode, inner_mask, next_select);
if (GET_MODE (x) != op_mode || inner != XEXP (x, 0)) if (xmode != op_mode || inner != XEXP (x, 0))
{
x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1)); x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
xmode = op_mode;
}
} }
/* If we have (and (lshiftrt FOO C1) C2) where the combination of the /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
...@@ -8832,17 +8863,17 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8832,17 +8863,17 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
bit. */ bit. */
&& ((INTVAL (XEXP (x, 1)) && ((INTVAL (XEXP (x, 1))
+ num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
>= GET_MODE_PRECISION (GET_MODE (x))) >= GET_MODE_PRECISION (xmode))
&& pow2p_hwi (mask + 1) && pow2p_hwi (mask + 1)
/* Number of bits left after the shift must be more than the mask /* Number of bits left after the shift must be more than the mask
needs. */ needs. */
&& ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1)) && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
<= GET_MODE_PRECISION (GET_MODE (x))) <= GET_MODE_PRECISION (xmode))
/* Must be more sign bit copies than the mask needs. */ /* Must be more sign bit copies than the mask needs. */
&& ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
>= exact_log2 (mask + 1))) >= exact_log2 (mask + 1)))
x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0),
GEN_INT (GET_MODE_PRECISION (GET_MODE (x)) GEN_INT (GET_MODE_PRECISION (xmode)
- exact_log2 (mask + 1))); - exact_log2 (mask + 1)));
goto shiftrt; goto shiftrt;
...@@ -8850,7 +8881,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8850,7 +8881,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
case ASHIFTRT: case ASHIFTRT:
/* If we are just looking for the sign bit, we don't need this shift at /* If we are just looking for the sign bit, we don't need this shift at
all, even if it has a variable count. */ all, even if it has a variable count. */
if (val_signbit_p (GET_MODE (x), mask)) if (val_signbit_p (xmode, mask))
return force_to_mode (XEXP (x, 0), mode, mask, next_select); return force_to_mode (XEXP (x, 0), mode, mask, next_select);
/* If this is a shift by a constant, get a mask that contains those bits /* If this is a shift by a constant, get a mask that contains those bits
...@@ -8863,13 +8894,14 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8863,13 +8894,14 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
{ {
unsigned HOST_WIDE_INT nonzero;
int i; int i;
/* If the considered data is wider than HOST_WIDE_INT, we can't /* If the considered data is wider than HOST_WIDE_INT, we can't
represent a mask for all its bits in a single scalar. represent a mask for all its bits in a single scalar.
But we only care about the lower bits, so calculate these. */ But we only care about the lower bits, so calculate these. */
if (GET_MODE_PRECISION (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) if (GET_MODE_PRECISION (xmode) > HOST_BITS_PER_WIDE_INT)
{ {
nonzero = HOST_WIDE_INT_M1U; nonzero = HOST_WIDE_INT_M1U;
...@@ -8878,21 +8910,21 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8878,21 +8910,21 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
We need only shift if these are fewer than nonzero can We need only shift if these are fewer than nonzero can
hold. If not, we must keep all bits set in nonzero. */ hold. If not, we must keep all bits set in nonzero. */
if (GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1)) if (GET_MODE_PRECISION (xmode) - INTVAL (XEXP (x, 1))
< HOST_BITS_PER_WIDE_INT) < HOST_BITS_PER_WIDE_INT)
nonzero >>= INTVAL (XEXP (x, 1)) nonzero >>= INTVAL (XEXP (x, 1))
+ HOST_BITS_PER_WIDE_INT + HOST_BITS_PER_WIDE_INT
- GET_MODE_PRECISION (GET_MODE (x)) ; - GET_MODE_PRECISION (xmode);
} }
else else
{ {
nonzero = GET_MODE_MASK (GET_MODE (x)); nonzero = GET_MODE_MASK (xmode);
nonzero >>= INTVAL (XEXP (x, 1)); nonzero >>= INTVAL (XEXP (x, 1));
} }
if ((mask & ~nonzero) == 0) if ((mask & ~nonzero) == 0)
{ {
x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x), x = simplify_shift_const (NULL_RTX, LSHIFTRT, xmode,
XEXP (x, 0), INTVAL (XEXP (x, 1))); XEXP (x, 0), INTVAL (XEXP (x, 1)));
if (GET_CODE (x) != ASHIFTRT) if (GET_CODE (x) != ASHIFTRT)
return force_to_mode (x, mode, mask, next_select); return force_to_mode (x, mode, mask, next_select);
...@@ -8901,8 +8933,8 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8901,8 +8933,8 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
else if ((i = exact_log2 (mask)) >= 0) else if ((i = exact_log2 (mask)) >= 0)
{ {
x = simplify_shift_const x = simplify_shift_const
(NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0), (NULL_RTX, LSHIFTRT, xmode, XEXP (x, 0),
GET_MODE_PRECISION (GET_MODE (x)) - 1 - i); GET_MODE_PRECISION (xmode) - 1 - i);
if (GET_CODE (x) != ASHIFTRT) if (GET_CODE (x) != ASHIFTRT)
return force_to_mode (x, mode, mask, next_select); return force_to_mode (x, mode, mask, next_select);
...@@ -8912,8 +8944,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8912,8 +8944,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
/* If MASK is 1, convert this to an LSHIFTRT. This can be done /* If MASK is 1, convert this to an LSHIFTRT. This can be done
even if the shift count isn't a constant. */ even if the shift count isn't a constant. */
if (mask == 1) if (mask == 1)
x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0), XEXP (x, 1));
XEXP (x, 0), XEXP (x, 1));
shiftrt: shiftrt:
...@@ -8925,7 +8956,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8925,7 +8956,7 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
&& CONST_INT_P (XEXP (x, 1)) && CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) >= 0
&& (INTVAL (XEXP (x, 1)) && (INTVAL (XEXP (x, 1))
<= GET_MODE_PRECISION (GET_MODE (x)) - (floor_log2 (mask) + 1)) <= GET_MODE_PRECISION (xmode) - (floor_log2 (mask) + 1))
&& GET_CODE (XEXP (x, 0)) == ASHIFT && GET_CODE (XEXP (x, 0)) == ASHIFT
&& XEXP (XEXP (x, 0), 1) == XEXP (x, 1)) && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
...@@ -8943,12 +8974,11 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8943,12 +8974,11 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
&& INTVAL (XEXP (x, 1)) >= 0) && INTVAL (XEXP (x, 1)) >= 0)
{ {
temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE, temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
GET_MODE (x), xmode, gen_int_mode (mask, xmode),
gen_int_mode (mask, GET_MODE (x)),
XEXP (x, 1)); XEXP (x, 1));
if (temp && CONST_INT_P (temp)) if (temp && CONST_INT_P (temp))
x = simplify_gen_binary (code, GET_MODE (x), x = simplify_gen_binary (code, xmode,
force_to_mode (XEXP (x, 0), GET_MODE (x), force_to_mode (XEXP (x, 0), xmode,
INTVAL (temp), next_select), INTVAL (temp), next_select),
XEXP (x, 1)); XEXP (x, 1));
} }
...@@ -8975,14 +9005,12 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8975,14 +9005,12 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
&& CONST_INT_P (XEXP (XEXP (x, 0), 1)) && CONST_INT_P (XEXP (XEXP (x, 0), 1))
&& INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
&& (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
< GET_MODE_PRECISION (GET_MODE (x))) < GET_MODE_PRECISION (xmode))
&& INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
{ {
temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), xmode);
GET_MODE (x)); temp = simplify_gen_binary (XOR, xmode, XEXP (XEXP (x, 0), 0), temp);
temp = simplify_gen_binary (XOR, GET_MODE (x), x = simplify_gen_binary (LSHIFTRT, xmode,
XEXP (XEXP (x, 0), 0), temp);
x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
temp, XEXP (XEXP (x, 0), 1)); temp, XEXP (XEXP (x, 0), 1));
return force_to_mode (x, mode, mask, next_select); return force_to_mode (x, mode, mask, next_select);
...@@ -8996,8 +9024,11 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8996,8 +9024,11 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
op0 = gen_lowpart_or_truncate (op_mode, op0 = gen_lowpart_or_truncate (op_mode,
force_to_mode (XEXP (x, 0), mode, mask, force_to_mode (XEXP (x, 0), mode, mask,
next_select)); next_select));
if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) if (op_mode != xmode || op0 != XEXP (x, 0))
{
x = simplify_gen_unary (code, op_mode, op0, op_mode); x = simplify_gen_unary (code, op_mode, op0, op_mode);
xmode = op_mode;
}
break; break;
case NE: case NE:
...@@ -9018,14 +9049,14 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -9018,14 +9049,14 @@ force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
/* We have no way of knowing if the IF_THEN_ELSE can itself be /* We have no way of knowing if the IF_THEN_ELSE can itself be
written in a narrower mode. We play it safe and do not do so. */ written in a narrower mode. We play it safe and do not do so. */
op0 = gen_lowpart_or_truncate (GET_MODE (x), op0 = gen_lowpart_or_truncate (xmode,
force_to_mode (XEXP (x, 1), mode, force_to_mode (XEXP (x, 1), mode,
mask, next_select)); mask, next_select));
op1 = gen_lowpart_or_truncate (GET_MODE (x), op1 = gen_lowpart_or_truncate (xmode,
force_to_mode (XEXP (x, 2), mode, force_to_mode (XEXP (x, 2), mode,
mask, next_select)); mask, next_select));
if (op0 != XEXP (x, 1) || op1 != XEXP (x, 2)) if (op0 != XEXP (x, 1) || op1 != XEXP (x, 2))
x = simplify_gen_ternary (IF_THEN_ELSE, GET_MODE (x), x = simplify_gen_ternary (IF_THEN_ELSE, xmode,
GET_MODE (XEXP (x, 0)), XEXP (x, 0), GET_MODE (XEXP (x, 0)), XEXP (x, 0),
op0, op1); op0, op1);
break; break;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment