Commit 5511bc5a — authored and committed by Bernd Schmidt

explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION instead of…

explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION instead of GET_MODE_BITSIZE where appropriate.

	* explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION
	instead of GET_MODE_BITSIZE where appropriate.
	* rtlanal.c (subreg_lsb_1, subreg_get_info, nonzero_bits1,
	num_sign_bit_copies1, canonicalize_condition, low_bitmask_len,
	init_num_sign_bit_copies_in_rep): Likewise.
	* cse.c (fold_rtx, cse_insn): Likewise.
	* loop-doloop.c (doloop_modify, doloop_optimize): Likewise.
	* simplify-rtx.c (simplify_unary_operation_1,
	simplify_const_unary_operation, simplify_binary_operation_1,
	simplify_const_binary_operation, simplify_ternary_operation,
	simplify_const_relational_operation, simplify_subreg): Likewise.
	* combine.c (try_combine, find_split_point, combine_simplify_rtx,
	simplify_if_then_else, simplify_set, expand_compound_operation,
	expand_field_assignment, make_extraction, if_then_else_cond,
	make_compound_operation, force_to_mode, make_field_assignment,
	reg_nonzero_bits_for_combine, reg_num_sign_bit_copies_for_combine,
	extended_count, try_widen_shift_mode, simplify_shift_const_1,
	simplify_comparison, record_promoted_value, simplify_compare_const,
	record_dead_and_set_regs_1): Likewise.

From-SVN: r175946
parent 46c9550f
...@@ -24,6 +24,26 @@ ...@@ -24,6 +24,26 @@
simplify_binary_operation_1, simplify_const_relational_operation):
Likewise.
* explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION
instead of GET_MODE_BITSIZE where appropriate.
* rtlanal.c (subreg_lsb_1, subreg_get_info, nonzero_bits1,
num_sign_bit_copies1, canonicalize_condition, low_bitmask_len,
init_num_sign_bit_copies_in_rep): Likewise.
* cse.c (fold_rtx, cse_insn): Likewise.
* loop-doloop.c (doloop_modify, doloop_optimize): Likewise.
* simplify-rtx.c (simplify_unary_operation_1,
simplify_const_unary_operation, simplify_binary_operation_1,
simplify_const_binary_operation, simplify_ternary_operation,
simplify_const_relational_operation, simplify_subreg): Likewise.
* combine.c (try_combine, find_split_point, combine_simplify_rtx,
simplify_if_then_else, simplify_set, expand_compound_operation,
expand_field_assignment, make_extraction, if_then_else_cond,
make_compound_operation, force_to_mode, make_field_assignment,
reg_nonzero_bits_for_combine, reg_num_sign_bit_copies_for_combine,
extended_count, try_widen_shift_mode, simplify_shift_const_1,
simplify_comparison, record_promoted_value, simplify_compare_const,
record_dead_and_set_regs_1): Likewise.
2011-07-06  Michael Meissner  <meissner@linux.vnet.ibm.com>

	* config/rs6000/rs6000-protos.h (rs6000_call_indirect_aix): New
......
...@@ -2758,14 +2758,14 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p, ...@@ -2758,14 +2758,14 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
offset = INTVAL (XEXP (dest, 2)); offset = INTVAL (XEXP (dest, 2));
dest = XEXP (dest, 0); dest = XEXP (dest, 0);
if (BITS_BIG_ENDIAN) if (BITS_BIG_ENDIAN)
offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset; offset = GET_MODE_PRECISION (GET_MODE (dest)) - width - offset;
} }
} }
else else
{ {
if (GET_CODE (dest) == STRICT_LOW_PART) if (GET_CODE (dest) == STRICT_LOW_PART)
dest = XEXP (dest, 0); dest = XEXP (dest, 0);
width = GET_MODE_BITSIZE (GET_MODE (dest)); width = GET_MODE_PRECISION (GET_MODE (dest));
offset = 0; offset = 0;
} }
...@@ -2775,16 +2775,16 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p, ...@@ -2775,16 +2775,16 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
if (subreg_lowpart_p (dest)) if (subreg_lowpart_p (dest))
; ;
/* Handle the case where inner is twice the size of outer. */ /* Handle the case where inner is twice the size of outer. */
else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp))) else if (GET_MODE_PRECISION (GET_MODE (SET_DEST (temp)))
== 2 * GET_MODE_BITSIZE (GET_MODE (dest))) == 2 * GET_MODE_PRECISION (GET_MODE (dest)))
offset += GET_MODE_BITSIZE (GET_MODE (dest)); offset += GET_MODE_PRECISION (GET_MODE (dest));
/* Otherwise give up for now. */ /* Otherwise give up for now. */
else else
offset = -1; offset = -1;
} }
if (offset >= 0 if (offset >= 0
&& (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp))) && (GET_MODE_PRECISION (GET_MODE (SET_DEST (temp)))
<= HOST_BITS_PER_DOUBLE_INT)) <= HOST_BITS_PER_DOUBLE_INT))
{ {
double_int m, o, i; double_int m, o, i;
...@@ -3745,8 +3745,8 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p, ...@@ -3745,8 +3745,8 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
(REG_P (temp) (REG_P (temp)
&& VEC_index (reg_stat_type, reg_stat, && VEC_index (reg_stat_type, reg_stat,
REGNO (temp))->nonzero_bits != 0 REGNO (temp))->nonzero_bits != 0
&& GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD && GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD
&& GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT && GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT
&& (VEC_index (reg_stat_type, reg_stat, && (VEC_index (reg_stat_type, reg_stat,
REGNO (temp))->nonzero_bits REGNO (temp))->nonzero_bits
!= GET_MODE_MASK (word_mode)))) != GET_MODE_MASK (word_mode))))
...@@ -3755,8 +3755,8 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p, ...@@ -3755,8 +3755,8 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
(REG_P (temp) (REG_P (temp)
&& VEC_index (reg_stat_type, reg_stat, && VEC_index (reg_stat_type, reg_stat,
REGNO (temp))->nonzero_bits != 0 REGNO (temp))->nonzero_bits != 0
&& GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD && GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD
&& GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT && GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT
&& (VEC_index (reg_stat_type, reg_stat, && (VEC_index (reg_stat_type, reg_stat,
REGNO (temp))->nonzero_bits REGNO (temp))->nonzero_bits
!= GET_MODE_MASK (word_mode))))) != GET_MODE_MASK (word_mode)))))
...@@ -4685,7 +4685,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) ...@@ -4685,7 +4685,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src)
&& CONST_INT_P (SET_SRC (x)) && CONST_INT_P (SET_SRC (x))
&& ((INTVAL (XEXP (SET_DEST (x), 1)) && ((INTVAL (XEXP (SET_DEST (x), 1))
+ INTVAL (XEXP (SET_DEST (x), 2))) + INTVAL (XEXP (SET_DEST (x), 2)))
<= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))) <= GET_MODE_PRECISION (GET_MODE (XEXP (SET_DEST (x), 0))))
&& ! side_effects_p (XEXP (SET_DEST (x), 0))) && ! side_effects_p (XEXP (SET_DEST (x), 0)))
{ {
HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2)); HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
...@@ -4698,7 +4698,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) ...@@ -4698,7 +4698,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src)
rtx or_mask; rtx or_mask;
if (BITS_BIG_ENDIAN) if (BITS_BIG_ENDIAN)
pos = GET_MODE_BITSIZE (mode) - len - pos; pos = GET_MODE_PRECISION (mode) - len - pos;
or_mask = gen_int_mode (src << pos, mode); or_mask = gen_int_mode (src << pos, mode);
if (src == mask) if (src == mask)
...@@ -4791,7 +4791,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) ...@@ -4791,7 +4791,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src)
break; break;
pos = 0; pos = 0;
len = GET_MODE_BITSIZE (GET_MODE (inner)); len = GET_MODE_PRECISION (GET_MODE (inner));
unsignedp = 0; unsignedp = 0;
break; break;
...@@ -4805,7 +4805,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src) ...@@ -4805,7 +4805,7 @@ find_split_point (rtx *loc, rtx insn, bool set_src)
pos = INTVAL (XEXP (SET_SRC (x), 2)); pos = INTVAL (XEXP (SET_SRC (x), 2));
if (BITS_BIG_ENDIAN) if (BITS_BIG_ENDIAN)
pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos; pos = GET_MODE_PRECISION (GET_MODE (inner)) - len - pos;
unsignedp = (code == ZERO_EXTRACT); unsignedp = (code == ZERO_EXTRACT);
} }
break; break;
...@@ -4814,7 +4814,8 @@ find_split_point (rtx *loc, rtx insn, bool set_src) ...@@ -4814,7 +4814,8 @@ find_split_point (rtx *loc, rtx insn, bool set_src)
break; break;
} }
if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner))) if (len && pos >= 0
&& pos + len <= GET_MODE_PRECISION (GET_MODE (inner)))
{ {
enum machine_mode mode = GET_MODE (SET_SRC (x)); enum machine_mode mode = GET_MODE (SET_SRC (x));
...@@ -4845,9 +4846,9 @@ find_split_point (rtx *loc, rtx insn, bool set_src) ...@@ -4845,9 +4846,9 @@ find_split_point (rtx *loc, rtx insn, bool set_src)
(unsignedp ? LSHIFTRT : ASHIFTRT, mode, (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
gen_rtx_ASHIFT (mode, gen_rtx_ASHIFT (mode,
gen_lowpart (mode, inner), gen_lowpart (mode, inner),
GEN_INT (GET_MODE_BITSIZE (mode) GEN_INT (GET_MODE_PRECISION (mode)
- len - pos)), - len - pos)),
GEN_INT (GET_MODE_BITSIZE (mode) - len))); GEN_INT (GET_MODE_PRECISION (mode) - len)));
split = find_split_point (&SET_SRC (x), insn, true); split = find_split_point (&SET_SRC (x), insn, true);
if (split && split != &SET_SRC (x)) if (split && split != &SET_SRC (x))
...@@ -5544,7 +5545,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5544,7 +5545,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
if (GET_CODE (temp) == ASHIFTRT if (GET_CODE (temp) == ASHIFTRT
&& CONST_INT_P (XEXP (temp, 1)) && CONST_INT_P (XEXP (temp, 1))
&& INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1) && INTVAL (XEXP (temp, 1)) == GET_MODE_PRECISION (mode) - 1)
return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0), return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
INTVAL (XEXP (temp, 1))); INTVAL (XEXP (temp, 1)));
...@@ -5563,8 +5564,8 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5563,8 +5564,8 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
rtx temp1 = simplify_shift_const rtx temp1 = simplify_shift_const
(NULL_RTX, ASHIFTRT, mode, (NULL_RTX, ASHIFTRT, mode,
simplify_shift_const (NULL_RTX, ASHIFT, mode, temp, simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
GET_MODE_BITSIZE (mode) - 1 - i), GET_MODE_PRECISION (mode) - 1 - i),
GET_MODE_BITSIZE (mode) - 1 - i); GET_MODE_PRECISION (mode) - 1 - i);
/* If all we did was surround TEMP with the two shifts, we /* If all we did was surround TEMP with the two shifts, we
haven't improved anything, so don't use it. Otherwise, haven't improved anything, so don't use it. Otherwise,
...@@ -5639,14 +5640,14 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5639,14 +5640,14 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
&& (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) && (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
== ((unsigned HOST_WIDE_INT) 1 << (i + 1)) - 1)) == ((unsigned HOST_WIDE_INT) 1 << (i + 1)) - 1))
|| (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
&& (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) && (GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
== (unsigned int) i + 1)))) == (unsigned int) i + 1))))
return simplify_shift_const return simplify_shift_const
(NULL_RTX, ASHIFTRT, mode, (NULL_RTX, ASHIFTRT, mode,
simplify_shift_const (NULL_RTX, ASHIFT, mode, simplify_shift_const (NULL_RTX, ASHIFT, mode,
XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (XEXP (XEXP (x, 0), 0), 0),
GET_MODE_BITSIZE (mode) - (i + 1)), GET_MODE_PRECISION (mode) - (i + 1)),
GET_MODE_BITSIZE (mode) - (i + 1)); GET_MODE_PRECISION (mode) - (i + 1));
/* If only the low-order bit of X is possibly nonzero, (plus x -1) /* If only the low-order bit of X is possibly nonzero, (plus x -1)
can become (ashiftrt (ashift (xor x 1) C) C) where C is can become (ashiftrt (ashift (xor x 1) C) C) where C is
...@@ -5660,8 +5661,8 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5660,8 +5661,8 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
return simplify_shift_const (NULL_RTX, ASHIFTRT, mode, return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
simplify_shift_const (NULL_RTX, ASHIFT, mode, simplify_shift_const (NULL_RTX, ASHIFT, mode,
gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx), gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
GET_MODE_BITSIZE (mode) - 1), GET_MODE_PRECISION (mode) - 1),
GET_MODE_BITSIZE (mode) - 1); GET_MODE_PRECISION (mode) - 1);
/* If we are adding two things that have no bits in common, convert /* If we are adding two things that have no bits in common, convert
the addition into an IOR. This will often be further simplified, the addition into an IOR. This will often be further simplified,
...@@ -5793,7 +5794,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5793,7 +5794,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
&& op1 == const0_rtx && op1 == const0_rtx
&& mode == GET_MODE (op0) && mode == GET_MODE (op0)
&& (num_sign_bit_copies (op0, mode) && (num_sign_bit_copies (op0, mode)
== GET_MODE_BITSIZE (mode))) == GET_MODE_PRECISION (mode)))
{ {
op0 = expand_compound_operation (op0); op0 = expand_compound_operation (op0);
return simplify_gen_unary (NEG, mode, return simplify_gen_unary (NEG, mode,
...@@ -5818,7 +5819,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5818,7 +5819,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
&& op1 == const0_rtx && op1 == const0_rtx
&& mode == GET_MODE (op0) && mode == GET_MODE (op0)
&& (num_sign_bit_copies (op0, mode) && (num_sign_bit_copies (op0, mode)
== GET_MODE_BITSIZE (mode))) == GET_MODE_PRECISION (mode)))
{ {
op0 = expand_compound_operation (op0); op0 = expand_compound_operation (op0);
return plus_constant (gen_lowpart (mode, op0), 1); return plus_constant (gen_lowpart (mode, op0), 1);
...@@ -5833,7 +5834,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5833,7 +5834,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
&& new_code == NE && GET_MODE_CLASS (mode) == MODE_INT && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
&& op1 == const0_rtx && op1 == const0_rtx
&& (num_sign_bit_copies (op0, mode) && (num_sign_bit_copies (op0, mode)
== GET_MODE_BITSIZE (mode))) == GET_MODE_PRECISION (mode)))
return gen_lowpart (mode, return gen_lowpart (mode,
expand_compound_operation (op0)); expand_compound_operation (op0));
...@@ -5854,7 +5855,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5854,7 +5855,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
&& op1 == const0_rtx && op1 == const0_rtx
&& mode == GET_MODE (op0) && mode == GET_MODE (op0)
&& (num_sign_bit_copies (op0, mode) && (num_sign_bit_copies (op0, mode)
== GET_MODE_BITSIZE (mode))) == GET_MODE_PRECISION (mode)))
{ {
op0 = expand_compound_operation (op0); op0 = expand_compound_operation (op0);
return simplify_gen_unary (NOT, mode, return simplify_gen_unary (NOT, mode,
...@@ -5887,7 +5888,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest, ...@@ -5887,7 +5888,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
{ {
x = simplify_shift_const (NULL_RTX, ASHIFT, mode, x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
expand_compound_operation (op0), expand_compound_operation (op0),
GET_MODE_BITSIZE (mode) - 1 - i); GET_MODE_PRECISION (mode) - 1 - i);
if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx) if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
return XEXP (x, 0); return XEXP (x, 0);
else else
...@@ -6011,7 +6012,7 @@ simplify_if_then_else (rtx x) ...@@ -6011,7 +6012,7 @@ simplify_if_then_else (rtx x)
} }
else if (true_code == EQ && true_val == const0_rtx else if (true_code == EQ && true_val == const0_rtx
&& (num_sign_bit_copies (from, GET_MODE (from)) && (num_sign_bit_copies (from, GET_MODE (from))
== GET_MODE_BITSIZE (GET_MODE (from)))) == GET_MODE_PRECISION (GET_MODE (from))))
{ {
false_code = EQ; false_code = EQ;
false_val = constm1_rtx; false_val = constm1_rtx;
...@@ -6181,8 +6182,8 @@ simplify_if_then_else (rtx x) ...@@ -6181,8 +6182,8 @@ simplify_if_then_else (rtx x)
&& rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
&& (num_sign_bit_copies (f, GET_MODE (f)) && (num_sign_bit_copies (f, GET_MODE (f))
> (unsigned int) > (unsigned int)
(GET_MODE_BITSIZE (mode) (GET_MODE_PRECISION (mode)
- GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0)))))) - GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (t, 0), 0))))))
{ {
c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
extend_op = SIGN_EXTEND; extend_op = SIGN_EXTEND;
...@@ -6197,8 +6198,8 @@ simplify_if_then_else (rtx x) ...@@ -6197,8 +6198,8 @@ simplify_if_then_else (rtx x)
&& rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
&& (num_sign_bit_copies (f, GET_MODE (f)) && (num_sign_bit_copies (f, GET_MODE (f))
> (unsigned int) > (unsigned int)
(GET_MODE_BITSIZE (mode) (GET_MODE_PRECISION (mode)
- GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1)))))) - GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (t, 0), 1))))))
{ {
c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
extend_op = SIGN_EXTEND; extend_op = SIGN_EXTEND;
...@@ -6269,7 +6270,7 @@ simplify_if_then_else (rtx x) ...@@ -6269,7 +6270,7 @@ simplify_if_then_else (rtx x)
&& ((1 == nonzero_bits (XEXP (cond, 0), mode) && ((1 == nonzero_bits (XEXP (cond, 0), mode)
&& (i = exact_log2 (UINTVAL (true_rtx))) >= 0) && (i = exact_log2 (UINTVAL (true_rtx))) >= 0)
|| ((num_sign_bit_copies (XEXP (cond, 0), mode) || ((num_sign_bit_copies (XEXP (cond, 0), mode)
== GET_MODE_BITSIZE (mode)) == GET_MODE_PRECISION (mode))
&& (i = exact_log2 (-UINTVAL (true_rtx))) >= 0))) && (i = exact_log2 (-UINTVAL (true_rtx))) >= 0)))
return return
simplify_shift_const (NULL_RTX, ASHIFT, mode, simplify_shift_const (NULL_RTX, ASHIFT, mode,
...@@ -6535,8 +6536,8 @@ simplify_set (rtx x) ...@@ -6535,8 +6536,8 @@ simplify_set (rtx x)
if (dest == cc0_rtx if (dest == cc0_rtx
&& GET_CODE (src) == SUBREG && GET_CODE (src) == SUBREG
&& subreg_lowpart_p (src) && subreg_lowpart_p (src)
&& (GET_MODE_BITSIZE (GET_MODE (src)) && (GET_MODE_PRECISION (GET_MODE (src))
< GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src))))) < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (src)))))
{ {
rtx inner = SUBREG_REG (src); rtx inner = SUBREG_REG (src);
enum machine_mode inner_mode = GET_MODE (inner); enum machine_mode inner_mode = GET_MODE (inner);
...@@ -6588,7 +6589,7 @@ simplify_set (rtx x) ...@@ -6588,7 +6589,7 @@ simplify_set (rtx x)
#endif #endif
&& (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
GET_MODE (XEXP (XEXP (src, 0), 0))) GET_MODE (XEXP (XEXP (src, 0), 0)))
== GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0)))) == GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (src, 0), 0))))
&& ! side_effects_p (src)) && ! side_effects_p (src))
{ {
rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
...@@ -6764,7 +6765,7 @@ expand_compound_operation (rtx x) ...@@ -6764,7 +6765,7 @@ expand_compound_operation (rtx x)
if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0)))) if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
return x; return x;
len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))); len = GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)));
/* If the inner object has VOIDmode (the only way this can happen /* If the inner object has VOIDmode (the only way this can happen
is if it is an ASM_OPERANDS), we can't do anything since we don't is if it is an ASM_OPERANDS), we can't do anything since we don't
know how much masking to do. */ know how much masking to do. */
...@@ -6798,11 +6799,11 @@ expand_compound_operation (rtx x) ...@@ -6798,11 +6799,11 @@ expand_compound_operation (rtx x)
pos = INTVAL (XEXP (x, 2)); pos = INTVAL (XEXP (x, 2));
/* This should stay within the object being extracted, fail otherwise. */ /* This should stay within the object being extracted, fail otherwise. */
if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) if (len + pos > GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))))
return x; return x;
if (BITS_BIG_ENDIAN) if (BITS_BIG_ENDIAN)
pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos; pos = GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) - len - pos;
break; break;
...@@ -6863,7 +6864,7 @@ expand_compound_operation (rtx x) ...@@ -6863,7 +6864,7 @@ expand_compound_operation (rtx x)
if (GET_CODE (XEXP (x, 0)) == TRUNCATE if (GET_CODE (XEXP (x, 0)) == TRUNCATE
&& GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x) && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
&& COMPARISON_P (XEXP (XEXP (x, 0), 0)) && COMPARISON_P (XEXP (XEXP (x, 0), 0))
&& (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) && (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
<= HOST_BITS_PER_WIDE_INT) <= HOST_BITS_PER_WIDE_INT)
&& (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) && (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
return XEXP (XEXP (x, 0), 0); return XEXP (XEXP (x, 0), 0);
...@@ -6873,7 +6874,7 @@ expand_compound_operation (rtx x) ...@@ -6873,7 +6874,7 @@ expand_compound_operation (rtx x)
&& GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x) && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
&& subreg_lowpart_p (XEXP (x, 0)) && subreg_lowpart_p (XEXP (x, 0))
&& COMPARISON_P (SUBREG_REG (XEXP (x, 0))) && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
&& (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) && (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
<= HOST_BITS_PER_WIDE_INT) <= HOST_BITS_PER_WIDE_INT)
&& (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) && (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
return SUBREG_REG (XEXP (x, 0)); return SUBREG_REG (XEXP (x, 0));
...@@ -6895,7 +6896,7 @@ expand_compound_operation (rtx x) ...@@ -6895,7 +6896,7 @@ expand_compound_operation (rtx x)
extraction. Then the constant of 31 would be substituted in extraction. Then the constant of 31 would be substituted in
to produce such a position. */ to produce such a position. */
modewidth = GET_MODE_BITSIZE (GET_MODE (x)); modewidth = GET_MODE_PRECISION (GET_MODE (x));
if (modewidth >= pos + len) if (modewidth >= pos + len)
{ {
enum machine_mode mode = GET_MODE (x); enum machine_mode mode = GET_MODE (x);
...@@ -6949,7 +6950,7 @@ expand_field_assignment (const_rtx x) ...@@ -6949,7 +6950,7 @@ expand_field_assignment (const_rtx x)
&& GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG) && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
{ {
inner = SUBREG_REG (XEXP (SET_DEST (x), 0)); inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))); len = GET_MODE_PRECISION (GET_MODE (XEXP (SET_DEST (x), 0)));
pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0))); pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
} }
else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
...@@ -6961,23 +6962,23 @@ expand_field_assignment (const_rtx x) ...@@ -6961,23 +6962,23 @@ expand_field_assignment (const_rtx x)
/* A constant position should stay within the width of INNER. */ /* A constant position should stay within the width of INNER. */
if (CONST_INT_P (pos) if (CONST_INT_P (pos)
&& INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner))) && INTVAL (pos) + len > GET_MODE_PRECISION (GET_MODE (inner)))
break; break;
if (BITS_BIG_ENDIAN) if (BITS_BIG_ENDIAN)
{ {
if (CONST_INT_P (pos)) if (CONST_INT_P (pos))
pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len pos = GEN_INT (GET_MODE_PRECISION (GET_MODE (inner)) - len
- INTVAL (pos)); - INTVAL (pos));
else if (GET_CODE (pos) == MINUS else if (GET_CODE (pos) == MINUS
&& CONST_INT_P (XEXP (pos, 1)) && CONST_INT_P (XEXP (pos, 1))
&& (INTVAL (XEXP (pos, 1)) && (INTVAL (XEXP (pos, 1))
== GET_MODE_BITSIZE (GET_MODE (inner)) - len)) == GET_MODE_PRECISION (GET_MODE (inner)) - len))
/* If position is ADJUST - X, new position is X. */ /* If position is ADJUST - X, new position is X. */
pos = XEXP (pos, 0); pos = XEXP (pos, 0);
else else
pos = simplify_gen_binary (MINUS, GET_MODE (pos), pos = simplify_gen_binary (MINUS, GET_MODE (pos),
GEN_INT (GET_MODE_BITSIZE ( GEN_INT (GET_MODE_PRECISION (
GET_MODE (inner)) GET_MODE (inner))
- len), - len),
pos); pos);
...@@ -7152,7 +7153,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos, ...@@ -7152,7 +7153,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
: BITS_PER_UNIT)) == 0 : BITS_PER_UNIT)) == 0
/* We can't do this if we are widening INNER_MODE (it /* We can't do this if we are widening INNER_MODE (it
may not be aligned, for one thing). */ may not be aligned, for one thing). */
&& GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode) && GET_MODE_PRECISION (inner_mode) >= GET_MODE_PRECISION (tmode)
&& (inner_mode == tmode && (inner_mode == tmode
|| (! mode_dependent_address_p (XEXP (inner, 0)) || (! mode_dependent_address_p (XEXP (inner, 0))
&& ! MEM_VOLATILE_P (inner)))))) && ! MEM_VOLATILE_P (inner))))))
...@@ -7170,7 +7171,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos, ...@@ -7170,7 +7171,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
/* POS counts from lsb, but make OFFSET count in memory order. */ /* POS counts from lsb, but make OFFSET count in memory order. */
if (BYTES_BIG_ENDIAN) if (BYTES_BIG_ENDIAN)
offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT; offset = (GET_MODE_PRECISION (is_mode) - len - pos) / BITS_PER_UNIT;
else else
offset = pos / BITS_PER_UNIT; offset = pos / BITS_PER_UNIT;
...@@ -7275,7 +7276,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos, ...@@ -7275,7 +7276,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
other cases, we would only be going outside our object in cases when other cases, we would only be going outside our object in cases when
an original shift would have been undefined. */ an original shift would have been undefined. */
if (MEM_P (inner) if (MEM_P (inner)
&& ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode)) && ((pos_rtx == 0 && pos + len > GET_MODE_PRECISION (is_mode))
|| (pos_rtx != 0 && len != 1))) || (pos_rtx != 0 && len != 1)))
return 0; return 0;
...@@ -7550,7 +7551,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) ...@@ -7550,7 +7551,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
{ {
enum rtx_code code = GET_CODE (x); enum rtx_code code = GET_CODE (x);
enum machine_mode mode = GET_MODE (x); enum machine_mode mode = GET_MODE (x);
int mode_width = GET_MODE_BITSIZE (mode); int mode_width = GET_MODE_PRECISION (mode);
rtx rhs, lhs; rtx rhs, lhs;
enum rtx_code next_code; enum rtx_code next_code;
int i, j; int i, j;
...@@ -7709,7 +7710,7 @@ make_compound_operation (rtx x, enum rtx_code in_code) ...@@ -7709,7 +7710,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
{ {
new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
new_rtx = make_extraction (mode, new_rtx, new_rtx = make_extraction (mode, new_rtx,
(GET_MODE_BITSIZE (mode) (GET_MODE_PRECISION (mode)
- INTVAL (XEXP (XEXP (x, 0), 1))), - INTVAL (XEXP (XEXP (x, 0), 1))),
NULL_RTX, i, 1, 0, in_code == COMPARE); NULL_RTX, i, 1, 0, in_code == COMPARE);
} }
...@@ -8100,7 +8101,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8100,7 +8101,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
/* It is not valid to do a right-shift in a narrower mode /* It is not valid to do a right-shift in a narrower mode
than the one it came in with. */ than the one it came in with. */
if ((code == LSHIFTRT || code == ASHIFTRT) if ((code == LSHIFTRT || code == ASHIFTRT)
&& GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x))) && GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (x)))
op_mode = GET_MODE (x); op_mode = GET_MODE (x);
/* Truncate MASK to fit OP_MODE. */ /* Truncate MASK to fit OP_MODE. */
...@@ -8208,7 +8209,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8208,7 +8209,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
unsigned HOST_WIDE_INT cval unsigned HOST_WIDE_INT cval
= UINTVAL (XEXP (x, 1)) = UINTVAL (XEXP (x, 1))
| (GET_MODE_MASK (GET_MODE (x)) & ~mask); | (GET_MODE_MASK (GET_MODE (x)) & ~mask);
int width = GET_MODE_BITSIZE (GET_MODE (x)); int width = GET_MODE_PRECISION (GET_MODE (x));
rtx y; rtx y;
/* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
...@@ -8236,7 +8237,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8236,7 +8237,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
This may eliminate that PLUS and, later, the AND. */ This may eliminate that PLUS and, later, the AND. */
{ {
unsigned int width = GET_MODE_BITSIZE (mode); unsigned int width = GET_MODE_PRECISION (mode);
unsigned HOST_WIDE_INT smask = mask; unsigned HOST_WIDE_INT smask = mask;
/* If MODE is narrower than HOST_WIDE_INT and mask is a negative /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
...@@ -8304,7 +8305,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8304,7 +8305,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
&& CONST_INT_P (XEXP (x, 1)) && CONST_INT_P (XEXP (x, 1))
&& ((INTVAL (XEXP (XEXP (x, 0), 1)) && ((INTVAL (XEXP (XEXP (x, 0), 1))
+ floor_log2 (INTVAL (XEXP (x, 1)))) + floor_log2 (INTVAL (XEXP (x, 1))))
< GET_MODE_BITSIZE (GET_MODE (x))) < GET_MODE_PRECISION (GET_MODE (x)))
&& (UINTVAL (XEXP (x, 1)) && (UINTVAL (XEXP (x, 1))
& ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
{ {
...@@ -8349,10 +8350,10 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8349,10 +8350,10 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
if (! (CONST_INT_P (XEXP (x, 1)) if (! (CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) >= 0
&& INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)) && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (mode))
&& ! (GET_MODE (XEXP (x, 1)) != VOIDmode && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1))) && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
< (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))) < (unsigned HOST_WIDE_INT) GET_MODE_PRECISION (mode))))
break; break;
/* If the shift count is a constant and we can do arithmetic in /* If the shift count is a constant and we can do arithmetic in
...@@ -8360,7 +8361,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8360,7 +8361,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
conservative form of the mask. */ conservative form of the mask. */
if (CONST_INT_P (XEXP (x, 1)) if (CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) >= 0
&& INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode) && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (op_mode)
&& HWI_COMPUTABLE_MODE_P (op_mode)) && HWI_COMPUTABLE_MODE_P (op_mode))
mask >>= INTVAL (XEXP (x, 1)); mask >>= INTVAL (XEXP (x, 1));
else else
...@@ -8411,17 +8412,17 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8411,17 +8412,17 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
bit. */ bit. */
&& ((INTVAL (XEXP (x, 1)) && ((INTVAL (XEXP (x, 1))
+ num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
>= GET_MODE_BITSIZE (GET_MODE (x))) >= GET_MODE_PRECISION (GET_MODE (x)))
&& exact_log2 (mask + 1) >= 0 && exact_log2 (mask + 1) >= 0
/* Number of bits left after the shift must be more than the mask /* Number of bits left after the shift must be more than the mask
needs. */ needs. */
&& ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1)) && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
<= GET_MODE_BITSIZE (GET_MODE (x))) <= GET_MODE_PRECISION (GET_MODE (x)))
/* Must be more sign bit copies than the mask needs. */ /* Must be more sign bit copies than the mask needs. */
&& ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
>= exact_log2 (mask + 1))) >= exact_log2 (mask + 1)))
x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
GEN_INT (GET_MODE_BITSIZE (GET_MODE (x)) GEN_INT (GET_MODE_PRECISION (GET_MODE (x))
- exact_log2 (mask + 1))); - exact_log2 (mask + 1)));
goto shiftrt; goto shiftrt;
...@@ -8448,20 +8449,20 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8448,20 +8449,20 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
represent a mask for all its bits in a single scalar. represent a mask for all its bits in a single scalar.
But we only care about the lower bits, so calculate these. */ But we only care about the lower bits, so calculate these. */
if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) if (GET_MODE_PRECISION (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
{ {
nonzero = ~(unsigned HOST_WIDE_INT) 0; nonzero = ~(unsigned HOST_WIDE_INT) 0;
/* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) /* GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
is the number of bits a full-width mask would have set. is the number of bits a full-width mask would have set.
We need only shift if these are fewer than nonzero can We need only shift if these are fewer than nonzero can
hold. If not, we must keep all bits set in nonzero. */ hold. If not, we must keep all bits set in nonzero. */
if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) if (GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
< HOST_BITS_PER_WIDE_INT) < HOST_BITS_PER_WIDE_INT)
nonzero >>= INTVAL (XEXP (x, 1)) nonzero >>= INTVAL (XEXP (x, 1))
+ HOST_BITS_PER_WIDE_INT + HOST_BITS_PER_WIDE_INT
- GET_MODE_BITSIZE (GET_MODE (x)) ; - GET_MODE_PRECISION (GET_MODE (x)) ;
} }
else else
{ {
...@@ -8481,7 +8482,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8481,7 +8482,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
{ {
x = simplify_shift_const x = simplify_shift_const
(NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0), (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i); GET_MODE_PRECISION (GET_MODE (x)) - 1 - i);
if (GET_CODE (x) != ASHIFTRT) if (GET_CODE (x) != ASHIFTRT)
return force_to_mode (x, mode, mask, next_select); return force_to_mode (x, mode, mask, next_select);
...@@ -8504,7 +8505,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8504,7 +8505,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
&& CONST_INT_P (XEXP (x, 1)) && CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) >= 0
&& (INTVAL (XEXP (x, 1)) && (INTVAL (XEXP (x, 1))
<= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1)) <= GET_MODE_PRECISION (GET_MODE (x)) - (floor_log2 (mask) + 1))
&& GET_CODE (XEXP (x, 0)) == ASHIFT && GET_CODE (XEXP (x, 0)) == ASHIFT
&& XEXP (XEXP (x, 0), 1) == XEXP (x, 1)) && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
...@@ -8552,7 +8553,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, ...@@ -8552,7 +8553,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
&& CONST_INT_P (XEXP (XEXP (x, 0), 1)) && CONST_INT_P (XEXP (XEXP (x, 0), 1))
&& INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
&& (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
< GET_MODE_BITSIZE (GET_MODE (x))) < GET_MODE_PRECISION (GET_MODE (x)))
&& INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
{ {
temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
...@@ -8804,7 +8805,7 @@ if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse) ...@@ -8804,7 +8805,7 @@ if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
false values when testing X. */ false values when testing X. */
else if (x == constm1_rtx || x == const0_rtx else if (x == constm1_rtx || x == const0_rtx
|| (mode != VOIDmode || (mode != VOIDmode
&& num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode))) && num_sign_bit_copies (x, mode) == GET_MODE_PRECISION (mode)))
{ {
*ptrue = constm1_rtx, *pfalse = const0_rtx; *ptrue = constm1_rtx, *pfalse = const0_rtx;
return x; return x;
...@@ -9136,8 +9137,8 @@ make_field_assignment (rtx x) ...@@ -9136,8 +9137,8 @@ make_field_assignment (rtx x)
return x; return x;
pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len); pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest)) if (pos < 0 || pos + len > GET_MODE_PRECISION (GET_MODE (dest))
|| GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT || GET_MODE_PRECISION (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
|| (c1 & nonzero_bits (other, GET_MODE (dest))) != 0) || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
return x; return x;
...@@ -9158,7 +9159,7 @@ make_field_assignment (rtx x) ...@@ -9158,7 +9159,7 @@ make_field_assignment (rtx x)
other, pos), other, pos),
dest); dest);
src = force_to_mode (src, mode, src = force_to_mode (src, mode,
GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT GET_MODE_PRECISION (mode) >= HOST_BITS_PER_WIDE_INT
? ~(unsigned HOST_WIDE_INT) 0 ? ~(unsigned HOST_WIDE_INT) 0
: ((unsigned HOST_WIDE_INT) 1 << len) - 1, : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
0); 0);
...@@ -9580,7 +9581,7 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode, ...@@ -9580,7 +9581,7 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
{ {
unsigned HOST_WIDE_INT mask = rsp->nonzero_bits; unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)) if (GET_MODE_PRECISION (GET_MODE (x)) < GET_MODE_PRECISION (mode))
/* We don't know anything about the upper bits. */ /* We don't know anything about the upper bits. */
mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x)); mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
*nonzero &= mask; *nonzero &= mask;
...@@ -9626,7 +9627,7 @@ reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode, ...@@ -9626,7 +9627,7 @@ reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
return tem; return tem;
if (nonzero_sign_valid && rsp->sign_bit_copies != 0 if (nonzero_sign_valid && rsp->sign_bit_copies != 0
&& GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode)) && GET_MODE_PRECISION (GET_MODE (x)) == GET_MODE_PRECISION (mode))
*result = rsp->sign_bit_copies; *result = rsp->sign_bit_copies;
return NULL; return NULL;
...@@ -9651,7 +9652,7 @@ extended_count (const_rtx x, enum machine_mode mode, int unsignedp) ...@@ -9651,7 +9652,7 @@ extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
return (unsignedp return (unsignedp
? (HWI_COMPUTABLE_MODE_P (mode) ? (HWI_COMPUTABLE_MODE_P (mode)
? (unsigned int) (GET_MODE_BITSIZE (mode) - 1 ? (unsigned int) (GET_MODE_PRECISION (mode) - 1
- floor_log2 (nonzero_bits (x, mode))) - floor_log2 (nonzero_bits (x, mode)))
: 0) : 0)
: num_sign_bit_copies (x, mode) - 1); : num_sign_bit_copies (x, mode) - 1);
...@@ -9802,7 +9803,7 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count, ...@@ -9802,7 +9803,7 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count,
{ {
if (orig_mode == mode) if (orig_mode == mode)
return mode; return mode;
gcc_assert (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (orig_mode)); gcc_assert (GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (orig_mode));
/* In general we can't perform in wider mode for right shift and rotate. */ /* In general we can't perform in wider mode for right shift and rotate. */
switch (code) switch (code)
...@@ -9811,8 +9812,8 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count, ...@@ -9811,8 +9812,8 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count,
/* We can still widen if the bits brought in from the left are identical /* We can still widen if the bits brought in from the left are identical
to the sign bit of ORIG_MODE. */ to the sign bit of ORIG_MODE. */
if (num_sign_bit_copies (op, mode) if (num_sign_bit_copies (op, mode)
> (unsigned) (GET_MODE_BITSIZE (mode) > (unsigned) (GET_MODE_PRECISION (mode)
- GET_MODE_BITSIZE (orig_mode))) - GET_MODE_PRECISION (orig_mode)))
return mode; return mode;
return orig_mode; return orig_mode;
...@@ -9829,7 +9830,7 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count, ...@@ -9829,7 +9830,7 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count,
int care_bits = low_bitmask_len (orig_mode, outer_const); int care_bits = low_bitmask_len (orig_mode, outer_const);
if (care_bits >= 0 if (care_bits >= 0
&& GET_MODE_BITSIZE (orig_mode) - care_bits >= count) && GET_MODE_PRECISION (orig_mode) - care_bits >= count)
return mode; return mode;
} }
/* fall through */ /* fall through */
...@@ -9845,9 +9846,9 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count, ...@@ -9845,9 +9846,9 @@ try_widen_shift_mode (enum rtx_code code, rtx op, int count,
} }
} }
/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift. /* Simplify a shift of VAROP by ORIG_COUNT bits. CODE says what kind
The result of the shift is RESULT_MODE. Return NULL_RTX if we cannot of shift. The result of the shift is RESULT_MODE. Return NULL_RTX
simplify it. Otherwise, return a simplified value. if we cannot simplify it. Otherwise, return a simplified value.
The shift is normally computed in the widest mode we find in VAROP, as The shift is normally computed in the widest mode we find in VAROP, as
long as it isn't a different number of words than RESULT_MODE. Exceptions long as it isn't a different number of words than RESULT_MODE. Exceptions
...@@ -9879,7 +9880,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -9879,7 +9880,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
/* If we were given an invalid count, don't do anything except exactly /* If we were given an invalid count, don't do anything except exactly
what was requested. */ what was requested. */
if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode)) if (orig_count < 0 || orig_count >= (int) GET_MODE_PRECISION (mode))
return NULL_RTX; return NULL_RTX;
count = orig_count; count = orig_count;
...@@ -9896,7 +9897,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -9896,7 +9897,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
/* Convert ROTATERT to ROTATE. */ /* Convert ROTATERT to ROTATE. */
if (code == ROTATERT) if (code == ROTATERT)
{ {
unsigned int bitsize = GET_MODE_BITSIZE (result_mode);; unsigned int bitsize = GET_MODE_PRECISION (result_mode);
code = ROTATE; code = ROTATE;
if (VECTOR_MODE_P (result_mode)) if (VECTOR_MODE_P (result_mode))
count = bitsize / GET_MODE_NUNITS (result_mode) - count; count = bitsize / GET_MODE_NUNITS (result_mode) - count;
...@@ -9917,12 +9918,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -9917,12 +9918,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
multiple operations, each of which are defined, we know what the multiple operations, each of which are defined, we know what the
result is supposed to be. */ result is supposed to be. */
if (count > (GET_MODE_BITSIZE (shift_mode) - 1)) if (count > (GET_MODE_PRECISION (shift_mode) - 1))
{ {
if (code == ASHIFTRT) if (code == ASHIFTRT)
count = GET_MODE_BITSIZE (shift_mode) - 1; count = GET_MODE_PRECISION (shift_mode) - 1;
else if (code == ROTATE || code == ROTATERT) else if (code == ROTATE || code == ROTATERT)
count %= GET_MODE_BITSIZE (shift_mode); count %= GET_MODE_PRECISION (shift_mode);
else else
{ {
/* We can't simply return zero because there may be an /* We can't simply return zero because there may be an
...@@ -9942,7 +9943,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -9942,7 +9943,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
is a no-op. */ is a no-op. */
if (code == ASHIFTRT if (code == ASHIFTRT
&& (num_sign_bit_copies (varop, shift_mode) && (num_sign_bit_copies (varop, shift_mode)
== GET_MODE_BITSIZE (shift_mode))) == GET_MODE_PRECISION (shift_mode)))
{ {
count = 0; count = 0;
break; break;
...@@ -9955,8 +9956,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -9955,8 +9956,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
if (code == ASHIFTRT if (code == ASHIFTRT
&& (count + num_sign_bit_copies (varop, shift_mode) && (count + num_sign_bit_copies (varop, shift_mode)
>= GET_MODE_BITSIZE (shift_mode))) >= GET_MODE_PRECISION (shift_mode)))
count = GET_MODE_BITSIZE (shift_mode) - 1; count = GET_MODE_PRECISION (shift_mode) - 1;
/* We simplify the tests below and elsewhere by converting /* We simplify the tests below and elsewhere by converting
ASHIFTRT to LSHIFTRT if we know the sign bit is clear. ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
...@@ -10086,7 +10087,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10086,7 +10087,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
AND of a new shift with a mask. We compute the result below. */ AND of a new shift with a mask. We compute the result below. */
if (CONST_INT_P (XEXP (varop, 1)) if (CONST_INT_P (XEXP (varop, 1))
&& INTVAL (XEXP (varop, 1)) >= 0 && INTVAL (XEXP (varop, 1)) >= 0
&& INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop)) && INTVAL (XEXP (varop, 1)) < GET_MODE_PRECISION (GET_MODE (varop))
&& HWI_COMPUTABLE_MODE_P (result_mode) && HWI_COMPUTABLE_MODE_P (result_mode)
&& HWI_COMPUTABLE_MODE_P (mode) && HWI_COMPUTABLE_MODE_P (mode)
&& !VECTOR_MODE_P (result_mode)) && !VECTOR_MODE_P (result_mode))
...@@ -10101,11 +10102,11 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10101,11 +10102,11 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2) we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2), with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
we can convert it to we can convert it to
(ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1). (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
This simplifies certain SIGN_EXTEND operations. */ This simplifies certain SIGN_EXTEND operations. */
if (code == ASHIFT && first_code == ASHIFTRT if (code == ASHIFT && first_code == ASHIFTRT
&& count == (GET_MODE_BITSIZE (result_mode) && count == (GET_MODE_PRECISION (result_mode)
- GET_MODE_BITSIZE (GET_MODE (varop)))) - GET_MODE_PRECISION (GET_MODE (varop))))
{ {
/* C3 has the low-order C1 bits zero. */ /* C3 has the low-order C1 bits zero. */
...@@ -10173,7 +10174,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10173,7 +10174,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
if (code == ASHIFTRT if (code == ASHIFTRT
|| (code == ROTATE && first_code == ASHIFTRT) || (code == ROTATE && first_code == ASHIFTRT)
|| GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT || GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT
|| (GET_MODE (varop) != result_mode || (GET_MODE (varop) != result_mode
&& (first_code == ASHIFTRT || first_code == LSHIFTRT && (first_code == ASHIFTRT || first_code == LSHIFTRT
|| first_code == ROTATE || first_code == ROTATE
...@@ -10261,7 +10262,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10261,7 +10262,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
&& XEXP (XEXP (varop, 0), 1) == constm1_rtx && XEXP (XEXP (varop, 0), 1) == constm1_rtx
&& (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
&& (code == LSHIFTRT || code == ASHIFTRT) && (code == LSHIFTRT || code == ASHIFTRT)
&& count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) && count == (GET_MODE_PRECISION (GET_MODE (varop)) - 1)
&& rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
{ {
count = 0; count = 0;
...@@ -10323,12 +10324,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10323,12 +10324,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
case EQ: case EQ:
/* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
says that the sign bit can be tested, FOO has mode MODE, C is says that the sign bit can be tested, FOO has mode MODE, C is
GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit GET_MODE_PRECISION (MODE) - 1, and FOO has only its low-order bit
that may be nonzero. */ that may be nonzero. */
if (code == LSHIFTRT if (code == LSHIFTRT
&& XEXP (varop, 1) == const0_rtx && XEXP (varop, 1) == const0_rtx
&& GET_MODE (XEXP (varop, 0)) == result_mode && GET_MODE (XEXP (varop, 0)) == result_mode
&& count == (GET_MODE_BITSIZE (result_mode) - 1) && count == (GET_MODE_PRECISION (result_mode) - 1)
&& HWI_COMPUTABLE_MODE_P (result_mode) && HWI_COMPUTABLE_MODE_P (result_mode)
&& STORE_FLAG_VALUE == -1 && STORE_FLAG_VALUE == -1
&& nonzero_bits (XEXP (varop, 0), result_mode) == 1 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
...@@ -10345,7 +10346,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10345,7 +10346,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
/* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
than the number of bits in the mode is equivalent to A. */ than the number of bits in the mode is equivalent to A. */
if (code == LSHIFTRT if (code == LSHIFTRT
&& count == (GET_MODE_BITSIZE (result_mode) - 1) && count == (GET_MODE_PRECISION (result_mode) - 1)
&& nonzero_bits (XEXP (varop, 0), result_mode) == 1) && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
{ {
varop = XEXP (varop, 0); varop = XEXP (varop, 0);
...@@ -10369,7 +10370,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10369,7 +10370,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
is one less than the number of bits in the mode is is one less than the number of bits in the mode is
equivalent to (xor A 1). */ equivalent to (xor A 1). */
if (code == LSHIFTRT if (code == LSHIFTRT
&& count == (GET_MODE_BITSIZE (result_mode) - 1) && count == (GET_MODE_PRECISION (result_mode) - 1)
&& XEXP (varop, 1) == constm1_rtx && XEXP (varop, 1) == constm1_rtx
&& nonzero_bits (XEXP (varop, 0), result_mode) == 1 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
&& merge_outer_ops (&outer_op, &outer_const, XOR, 1, result_mode, && merge_outer_ops (&outer_op, &outer_const, XOR, 1, result_mode,
...@@ -10453,7 +10454,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10453,7 +10454,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
&& GET_CODE (XEXP (varop, 0)) == ASHIFTRT && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
&& count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) && count == (GET_MODE_PRECISION (GET_MODE (varop)) - 1)
&& (code == LSHIFTRT || code == ASHIFTRT) && (code == LSHIFTRT || code == ASHIFTRT)
&& CONST_INT_P (XEXP (XEXP (varop, 0), 1)) && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
&& INTVAL (XEXP (XEXP (varop, 0), 1)) == count && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
...@@ -10477,8 +10478,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10477,8 +10478,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
&& GET_CODE (XEXP (varop, 0)) == LSHIFTRT && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
&& CONST_INT_P (XEXP (XEXP (varop, 0), 1)) && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
&& (INTVAL (XEXP (XEXP (varop, 0), 1)) && (INTVAL (XEXP (XEXP (varop, 0), 1))
>= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0))) >= (GET_MODE_PRECISION (GET_MODE (XEXP (varop, 0)))
- GET_MODE_BITSIZE (GET_MODE (varop))))) - GET_MODE_PRECISION (GET_MODE (varop)))))
{ {
rtx varop_inner = XEXP (varop, 0); rtx varop_inner = XEXP (varop, 0);
...@@ -10550,7 +10551,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode, ...@@ -10550,7 +10551,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
if (outer_op != UNKNOWN) if (outer_op != UNKNOWN)
{ {
if (GET_RTX_CLASS (outer_op) != RTX_UNARY if (GET_RTX_CLASS (outer_op) != RTX_UNARY
&& GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT) && GET_MODE_PRECISION (result_mode) < HOST_BITS_PER_WIDE_INT)
outer_const = trunc_int_for_mode (outer_const, result_mode); outer_const = trunc_int_for_mode (outer_const, result_mode);
if (outer_op == AND) if (outer_op == AND)
...@@ -10852,7 +10853,7 @@ static enum rtx_code ...@@ -10852,7 +10853,7 @@ static enum rtx_code
simplify_compare_const (enum rtx_code code, rtx op0, rtx *pop1) simplify_compare_const (enum rtx_code code, rtx op0, rtx *pop1)
{ {
enum machine_mode mode = GET_MODE (op0); enum machine_mode mode = GET_MODE (op0);
unsigned int mode_width = GET_MODE_BITSIZE (mode); unsigned int mode_width = GET_MODE_PRECISION (mode);
HOST_WIDE_INT const_op = INTVAL (*pop1); HOST_WIDE_INT const_op = INTVAL (*pop1);
/* Get the constant we are comparing against and turn off all bits /* Get the constant we are comparing against and turn off all bits
...@@ -11065,8 +11066,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11065,8 +11066,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
&& XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
&& XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1) && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
&& (INTVAL (XEXP (op0, 1)) && (INTVAL (XEXP (op0, 1))
== (GET_MODE_BITSIZE (GET_MODE (op0)) == (GET_MODE_PRECISION (GET_MODE (op0))
- (GET_MODE_BITSIZE - (GET_MODE_PRECISION
(GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))))))) (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
{ {
op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0)); op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
...@@ -11134,7 +11135,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11134,7 +11135,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
&& GET_CODE (inner_op1) == SUBREG && GET_CODE (inner_op1) == SUBREG
&& (GET_MODE (SUBREG_REG (inner_op0)) && (GET_MODE (SUBREG_REG (inner_op0))
== GET_MODE (SUBREG_REG (inner_op1))) == GET_MODE (SUBREG_REG (inner_op1)))
&& (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0))) && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (inner_op0)))
<= HOST_BITS_PER_WIDE_INT) <= HOST_BITS_PER_WIDE_INT)
&& (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0), && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
GET_MODE (SUBREG_REG (inner_op0))))) GET_MODE (SUBREG_REG (inner_op0)))))
...@@ -11197,7 +11198,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11197,7 +11198,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
while (CONST_INT_P (op1)) while (CONST_INT_P (op1))
{ {
enum machine_mode mode = GET_MODE (op0); enum machine_mode mode = GET_MODE (op0);
unsigned int mode_width = GET_MODE_BITSIZE (mode); unsigned int mode_width = GET_MODE_PRECISION (mode);
unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
int equality_comparison_p; int equality_comparison_p;
int sign_bit_comparison_p; int sign_bit_comparison_p;
...@@ -11231,7 +11232,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11231,7 +11232,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
if (sign_bit_comparison_p && HWI_COMPUTABLE_MODE_P (mode)) if (sign_bit_comparison_p && HWI_COMPUTABLE_MODE_P (mode))
op0 = force_to_mode (op0, mode, op0 = force_to_mode (op0, mode,
(unsigned HOST_WIDE_INT) 1 (unsigned HOST_WIDE_INT) 1
<< (GET_MODE_BITSIZE (mode) - 1), << (GET_MODE_PRECISION (mode) - 1),
0); 0);
/* Now try cases based on the opcode of OP0. If none of the cases /* Now try cases based on the opcode of OP0. If none of the cases
...@@ -11262,7 +11263,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11262,7 +11263,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
else else
{ {
mode = new_mode; mode = new_mode;
i = (GET_MODE_BITSIZE (mode) - 1 - i); i = (GET_MODE_PRECISION (mode) - 1 - i);
} }
} }
...@@ -11426,7 +11427,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11426,7 +11427,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
if (mode_width <= HOST_BITS_PER_WIDE_INT if (mode_width <= HOST_BITS_PER_WIDE_INT
&& subreg_lowpart_p (op0) && subreg_lowpart_p (op0)
&& GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width && GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0))) > mode_width
&& GET_CODE (SUBREG_REG (op0)) == PLUS && GET_CODE (SUBREG_REG (op0)) == PLUS
&& CONST_INT_P (XEXP (SUBREG_REG (op0), 1))) && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
{ {
...@@ -11446,14 +11447,14 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11446,14 +11447,14 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
/* (A - C1) sign-extends if it is positive and 1-extends /* (A - C1) sign-extends if it is positive and 1-extends
if it is negative, C2 both sign- and 1-extends. */ if it is negative, C2 both sign- and 1-extends. */
|| (num_sign_bit_copies (a, inner_mode) || (num_sign_bit_copies (a, inner_mode)
> (unsigned int) (GET_MODE_BITSIZE (inner_mode) > (unsigned int) (GET_MODE_PRECISION (inner_mode)
- mode_width) - mode_width)
&& const_op < 0))) && const_op < 0)))
|| ((unsigned HOST_WIDE_INT) c1 || ((unsigned HOST_WIDE_INT) c1
< (unsigned HOST_WIDE_INT) 1 << (mode_width - 2) < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
/* (A - C1) always sign-extends, like C2. */ /* (A - C1) always sign-extends, like C2. */
&& num_sign_bit_copies (a, inner_mode) && num_sign_bit_copies (a, inner_mode)
> (unsigned int) (GET_MODE_BITSIZE (inner_mode) > (unsigned int) (GET_MODE_PRECISION (inner_mode)
- (mode_width - 1)))) - (mode_width - 1))))
{ {
op0 = SUBREG_REG (op0); op0 = SUBREG_REG (op0);
...@@ -11464,7 +11465,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11464,7 +11465,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
/* If the inner mode is narrower and we are extracting the low part, /* If the inner mode is narrower and we are extracting the low part,
we can treat the SUBREG as if it were a ZERO_EXTEND. */ we can treat the SUBREG as if it were a ZERO_EXTEND. */
if (subreg_lowpart_p (op0) if (subreg_lowpart_p (op0)
&& GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width) && GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0))) < mode_width)
/* Fall through */ ; /* Fall through */ ;
else else
break; break;
...@@ -11713,10 +11714,10 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11713,10 +11714,10 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
the code has been changed. */ the code has been changed. */
&& (0 && (0
#ifdef WORD_REGISTER_OPERATIONS #ifdef WORD_REGISTER_OPERATIONS
|| (mode_width > GET_MODE_BITSIZE (tmode) || (mode_width > GET_MODE_PRECISION (tmode)
&& mode_width <= BITS_PER_WORD) && mode_width <= BITS_PER_WORD)
#endif #endif
|| (mode_width <= GET_MODE_BITSIZE (tmode) || (mode_width <= GET_MODE_PRECISION (tmode)
&& subreg_lowpart_p (XEXP (op0, 0)))) && subreg_lowpart_p (XEXP (op0, 0))))
&& CONST_INT_P (XEXP (op0, 1)) && CONST_INT_P (XEXP (op0, 1))
&& mode_width <= HOST_BITS_PER_WIDE_INT && mode_width <= HOST_BITS_PER_WIDE_INT
...@@ -11983,7 +11984,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -11983,7 +11984,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
op1 = gen_lowpart (GET_MODE (op0), op1); op1 = gen_lowpart (GET_MODE (op0), op1);
} }
} }
else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) else if ((GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0)))
<= HOST_BITS_PER_WIDE_INT) <= HOST_BITS_PER_WIDE_INT)
&& (nonzero_bits (SUBREG_REG (op0), && (nonzero_bits (SUBREG_REG (op0),
GET_MODE (SUBREG_REG (op0))) GET_MODE (SUBREG_REG (op0)))
...@@ -12045,11 +12046,11 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) ...@@ -12045,11 +12046,11 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
if (zero_extended if (zero_extended
|| ((num_sign_bit_copies (op0, tmode) || ((num_sign_bit_copies (op0, tmode)
> (unsigned int) (GET_MODE_BITSIZE (tmode) > (unsigned int) (GET_MODE_PRECISION (tmode)
- GET_MODE_BITSIZE (mode))) - GET_MODE_PRECISION (mode)))
&& (num_sign_bit_copies (op1, tmode) && (num_sign_bit_copies (op1, tmode)
> (unsigned int) (GET_MODE_BITSIZE (tmode) > (unsigned int) (GET_MODE_PRECISION (tmode)
- GET_MODE_BITSIZE (mode))))) - GET_MODE_PRECISION (mode)))))
{ {
/* If OP0 is an AND and we don't have an AND in MODE either, /* If OP0 is an AND and we don't have an AND in MODE either,
make a new AND in the proper mode. */ make a new AND in the proper mode. */
...@@ -12348,7 +12349,7 @@ record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data) ...@@ -12348,7 +12349,7 @@ record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
else if (GET_CODE (setter) == SET else if (GET_CODE (setter) == SET
&& GET_CODE (SET_DEST (setter)) == SUBREG && GET_CODE (SET_DEST (setter)) == SUBREG
&& SUBREG_REG (SET_DEST (setter)) == dest && SUBREG_REG (SET_DEST (setter)) == dest
&& GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD && GET_MODE_PRECISION (GET_MODE (dest)) <= BITS_PER_WORD
&& subreg_lowpart_p (SET_DEST (setter))) && subreg_lowpart_p (SET_DEST (setter)))
record_value_for_reg (dest, record_dead_insn, record_value_for_reg (dest, record_dead_insn,
gen_lowpart (GET_MODE (dest), gen_lowpart (GET_MODE (dest),
...@@ -12445,7 +12446,7 @@ record_promoted_value (rtx insn, rtx subreg) ...@@ -12445,7 +12446,7 @@ record_promoted_value (rtx insn, rtx subreg)
unsigned int regno = REGNO (SUBREG_REG (subreg)); unsigned int regno = REGNO (SUBREG_REG (subreg));
enum machine_mode mode = GET_MODE (subreg); enum machine_mode mode = GET_MODE (subreg);
if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT) if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
return; return;
for (links = LOG_LINKS (insn); links;) for (links = LOG_LINKS (insn); links;)
......
...@@ -3650,7 +3650,7 @@ fold_rtx (rtx x, rtx insn) ...@@ -3650,7 +3650,7 @@ fold_rtx (rtx x, rtx insn)
enum rtx_code associate_code; enum rtx_code associate_code;
if (is_shift if (is_shift
&& (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode) && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
|| INTVAL (const_arg1) < 0)) || INTVAL (const_arg1) < 0))
{ {
if (SHIFT_COUNT_TRUNCATED) if (SHIFT_COUNT_TRUNCATED)
...@@ -3699,7 +3699,7 @@ fold_rtx (rtx x, rtx insn) ...@@ -3699,7 +3699,7 @@ fold_rtx (rtx x, rtx insn)
break; break;
if (is_shift if (is_shift
&& (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode) && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
|| INTVAL (inner_const) < 0)) || INTVAL (inner_const) < 0))
{ {
if (SHIFT_COUNT_TRUNCATED) if (SHIFT_COUNT_TRUNCATED)
...@@ -3729,7 +3729,7 @@ fold_rtx (rtx x, rtx insn) ...@@ -3729,7 +3729,7 @@ fold_rtx (rtx x, rtx insn)
if (is_shift if (is_shift
&& CONST_INT_P (new_const) && CONST_INT_P (new_const)
&& INTVAL (new_const) >= GET_MODE_BITSIZE (mode)) && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
{ {
/* As an exception, we can turn an ASHIFTRT of this /* As an exception, we can turn an ASHIFTRT of this
form into a shift of the number of bits - 1. */ form into a shift of the number of bits - 1. */
...@@ -4672,13 +4672,13 @@ cse_insn (rtx insn) ...@@ -4672,13 +4672,13 @@ cse_insn (rtx insn)
if (src_const && src_related == 0 && CONST_INT_P (src_const) if (src_const && src_related == 0 && CONST_INT_P (src_const)
&& GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_BITSIZE (mode) < BITS_PER_WORD) && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
{ {
enum machine_mode wider_mode; enum machine_mode wider_mode;
for (wider_mode = GET_MODE_WIDER_MODE (mode); for (wider_mode = GET_MODE_WIDER_MODE (mode);
wider_mode != VOIDmode wider_mode != VOIDmode
&& GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
&& src_related == 0; && src_related == 0;
wider_mode = GET_MODE_WIDER_MODE (wider_mode)) wider_mode = GET_MODE_WIDER_MODE (wider_mode))
{ {
...@@ -5031,7 +5031,7 @@ cse_insn (rtx insn) ...@@ -5031,7 +5031,7 @@ cse_insn (rtx insn)
&& CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1)) && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
&& CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2)) && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
&& REG_P (XEXP (SET_DEST (sets[i].rtl), 0)) && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
&& (GET_MODE_BITSIZE (GET_MODE (SET_DEST (sets[i].rtl))) && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
>= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))) >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
&& ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)) && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
+ (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2)) + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
...@@ -5058,7 +5058,7 @@ cse_insn (rtx insn) ...@@ -5058,7 +5058,7 @@ cse_insn (rtx insn)
HOST_WIDE_INT mask; HOST_WIDE_INT mask;
unsigned int shift; unsigned int shift;
if (BITS_BIG_ENDIAN) if (BITS_BIG_ENDIAN)
shift = GET_MODE_BITSIZE (GET_MODE (dest_reg)) shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
- INTVAL (pos) - INTVAL (width); - INTVAL (pos) - INTVAL (width);
else else
shift = INTVAL (pos); shift = INTVAL (pos);
......
...@@ -51,7 +51,7 @@ static rtx break_out_memory_refs (rtx); ...@@ -51,7 +51,7 @@ static rtx break_out_memory_refs (rtx);
HOST_WIDE_INT HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode) trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{ {
int width = GET_MODE_BITSIZE (mode); int width = GET_MODE_PRECISION (mode);
/* You want to truncate to a _what_? */ /* You want to truncate to a _what_? */
gcc_assert (SCALAR_INT_MODE_P (mode)); gcc_assert (SCALAR_INT_MODE_P (mode));
......
...@@ -465,7 +465,7 @@ doloop_modify (struct loop *loop, struct niter_desc *desc, ...@@ -465,7 +465,7 @@ doloop_modify (struct loop *loop, struct niter_desc *desc,
Note that the maximum value loaded is iterations_max - 1. */ Note that the maximum value loaded is iterations_max - 1. */
if (desc->niter_max if (desc->niter_max
<= ((unsigned HOST_WIDEST_INT) 1 <= ((unsigned HOST_WIDEST_INT) 1
<< (GET_MODE_BITSIZE (mode) - 1))) << (GET_MODE_PRECISION (mode) - 1)))
nonneg = 1; nonneg = 1;
break; break;
...@@ -677,7 +677,7 @@ doloop_optimize (struct loop *loop) ...@@ -677,7 +677,7 @@ doloop_optimize (struct loop *loop)
doloop_seq = gen_doloop_end (doloop_reg, iterations, iterations_max, doloop_seq = gen_doloop_end (doloop_reg, iterations, iterations_max,
GEN_INT (level), start_label); GEN_INT (level), start_label);
word_mode_size = GET_MODE_BITSIZE (word_mode); word_mode_size = GET_MODE_PRECISION (word_mode);
word_mode_max word_mode_max
= ((unsigned HOST_WIDE_INT) 1 << (word_mode_size - 1) << 1) - 1; = ((unsigned HOST_WIDE_INT) 1 << (word_mode_size - 1) << 1) - 1;
if (! doloop_seq if (! doloop_seq
...@@ -685,10 +685,10 @@ doloop_optimize (struct loop *loop) ...@@ -685,10 +685,10 @@ doloop_optimize (struct loop *loop)
/* Before trying mode different from the one in that # of iterations is /* Before trying mode different from the one in that # of iterations is
computed, we must be sure that the number of iterations fits into computed, we must be sure that the number of iterations fits into
the new mode. */ the new mode. */
&& (word_mode_size >= GET_MODE_BITSIZE (mode) && (word_mode_size >= GET_MODE_PRECISION (mode)
|| desc->niter_max <= word_mode_max)) || desc->niter_max <= word_mode_max))
{ {
if (word_mode_size > GET_MODE_BITSIZE (mode)) if (word_mode_size > GET_MODE_PRECISION (mode))
{ {
zero_extend_p = true; zero_extend_p = true;
iterations = simplify_gen_unary (ZERO_EXTEND, word_mode, iterations = simplify_gen_unary (ZERO_EXTEND, word_mode,
......
...@@ -3177,7 +3177,7 @@ subreg_lsb_1 (enum machine_mode outer_mode, ...@@ -3177,7 +3177,7 @@ subreg_lsb_1 (enum machine_mode outer_mode,
unsigned int word; unsigned int word;
/* A paradoxical subreg begins at bit position 0. */ /* A paradoxical subreg begins at bit position 0. */
if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode)) if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
return 0; return 0;
if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
...@@ -3281,7 +3281,7 @@ subreg_get_info (unsigned int xregno, enum machine_mode xmode, ...@@ -3281,7 +3281,7 @@ subreg_get_info (unsigned int xregno, enum machine_mode xmode,
/* Paradoxical subregs are otherwise valid. */ /* Paradoxical subregs are otherwise valid. */
if (!rknown if (!rknown
&& offset == 0 && offset == 0
&& GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode)) && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
{ {
info->representable_p = true; info->representable_p = true;
/* If this is a big endian paradoxical subreg, which uses more /* If this is a big endian paradoxical subreg, which uses more
...@@ -3850,7 +3850,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -3850,7 +3850,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
unsigned HOST_WIDE_INT inner_nz; unsigned HOST_WIDE_INT inner_nz;
enum rtx_code code; enum rtx_code code;
enum machine_mode inner_mode; enum machine_mode inner_mode;
unsigned int mode_width = GET_MODE_BITSIZE (mode); unsigned int mode_width = GET_MODE_PRECISION (mode);
/* For floating-point and vector values, assume all bits are needed. */ /* For floating-point and vector values, assume all bits are needed. */
if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode) if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
...@@ -3858,11 +3858,11 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -3858,11 +3858,11 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
return nonzero; return nonzero;
/* If X is wider than MODE, use its mode instead. */ /* If X is wider than MODE, use its mode instead. */
if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width) if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
{ {
mode = GET_MODE (x); mode = GET_MODE (x);
nonzero = GET_MODE_MASK (mode); nonzero = GET_MODE_MASK (mode);
mode_width = GET_MODE_BITSIZE (mode); mode_width = GET_MODE_PRECISION (mode);
} }
if (mode_width > HOST_BITS_PER_WIDE_INT) if (mode_width > HOST_BITS_PER_WIDE_INT)
...@@ -3879,9 +3879,9 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -3879,9 +3879,9 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
not known to be zero. */ not known to be zero. */
if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
&& GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
&& GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
&& GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x))) && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
{ {
nonzero &= cached_nonzero_bits (x, GET_MODE (x), nonzero &= cached_nonzero_bits (x, GET_MODE (x),
known_x, known_mode, known_ret); known_x, known_mode, known_ret);
...@@ -3989,7 +3989,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -3989,7 +3989,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
/* Disabled to avoid exponential mutual recursion between nonzero_bits /* Disabled to avoid exponential mutual recursion between nonzero_bits
and num_sign_bit_copies. */ and num_sign_bit_copies. */
if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
== GET_MODE_BITSIZE (GET_MODE (x))) == GET_MODE_PRECISION (GET_MODE (x)))
nonzero = 1; nonzero = 1;
#endif #endif
...@@ -4002,7 +4002,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4002,7 +4002,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
/* Disabled to avoid exponential mutual recursion between nonzero_bits /* Disabled to avoid exponential mutual recursion between nonzero_bits
and num_sign_bit_copies. */ and num_sign_bit_copies. */
if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
== GET_MODE_BITSIZE (GET_MODE (x))) == GET_MODE_PRECISION (GET_MODE (x)))
nonzero = 1; nonzero = 1;
#endif #endif
break; break;
...@@ -4075,7 +4075,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4075,7 +4075,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
unsigned HOST_WIDE_INT nz1 unsigned HOST_WIDE_INT nz1
= cached_nonzero_bits (XEXP (x, 1), mode, = cached_nonzero_bits (XEXP (x, 1), mode,
known_x, known_mode, known_ret); known_x, known_mode, known_ret);
int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1; int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
int width0 = floor_log2 (nz0) + 1; int width0 = floor_log2 (nz0) + 1;
int width1 = floor_log2 (nz1) + 1; int width1 = floor_log2 (nz1) + 1;
int low0 = floor_log2 (nz0 & -nz0); int low0 = floor_log2 (nz0 & -nz0);
...@@ -4156,8 +4156,8 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4156,8 +4156,8 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
/* If the inner mode is a single word for both the host and target /* If the inner mode is a single word for both the host and target
machines, we can compute this from which bits of the inner machines, we can compute this from which bits of the inner
object might be nonzero. */ object might be nonzero. */
if (GET_MODE_BITSIZE (inner_mode) <= BITS_PER_WORD if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
&& (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT)) && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
{ {
nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode, nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
known_x, known_mode, known_ret); known_x, known_mode, known_ret);
...@@ -4174,8 +4174,8 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4174,8 +4174,8 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
/* On many CISC machines, accessing an object in a wider mode /* On many CISC machines, accessing an object in a wider mode
causes the high-order bits to become undefined. So they are causes the high-order bits to become undefined. So they are
not known to be zero. */ not known to be zero. */
if (GET_MODE_SIZE (GET_MODE (x)) if (GET_MODE_PRECISION (GET_MODE (x))
> GET_MODE_SIZE (inner_mode)) > GET_MODE_PRECISION (inner_mode))
nonzero |= (GET_MODE_MASK (GET_MODE (x)) nonzero |= (GET_MODE_MASK (GET_MODE (x))
& ~GET_MODE_MASK (inner_mode)); & ~GET_MODE_MASK (inner_mode));
} }
...@@ -4195,10 +4195,10 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4195,10 +4195,10 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
if (CONST_INT_P (XEXP (x, 1)) if (CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) >= 0
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
&& INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x))) && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
{ {
enum machine_mode inner_mode = GET_MODE (x); enum machine_mode inner_mode = GET_MODE (x);
unsigned int width = GET_MODE_BITSIZE (inner_mode); unsigned int width = GET_MODE_PRECISION (inner_mode);
int count = INTVAL (XEXP (x, 1)); int count = INTVAL (XEXP (x, 1));
unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
unsigned HOST_WIDE_INT op_nonzero unsigned HOST_WIDE_INT op_nonzero
...@@ -4351,7 +4351,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4351,7 +4351,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
unsigned int known_ret) unsigned int known_ret)
{ {
enum rtx_code code = GET_CODE (x); enum rtx_code code = GET_CODE (x);
unsigned int bitwidth = GET_MODE_BITSIZE (mode); unsigned int bitwidth = GET_MODE_PRECISION (mode);
int num0, num1, result; int num0, num1, result;
unsigned HOST_WIDE_INT nonzero; unsigned HOST_WIDE_INT nonzero;
...@@ -4367,15 +4367,15 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4367,15 +4367,15 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
return 1; return 1;
/* For a smaller object, just ignore the high bits. */ /* For a smaller object, just ignore the high bits. */
if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x))) if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
{ {
num0 = cached_num_sign_bit_copies (x, GET_MODE (x), num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
known_x, known_mode, known_ret); known_x, known_mode, known_ret);
return MAX (1, return MAX (1,
num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)); num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
} }
if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x))) if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
{ {
#ifndef WORD_REGISTER_OPERATIONS #ifndef WORD_REGISTER_OPERATIONS
/* If this machine does not do all register operations on the entire /* If this machine does not do all register operations on the entire
...@@ -4386,7 +4386,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4386,7 +4386,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
/* Likewise on machines that do, if the mode of the object is smaller /* Likewise on machines that do, if the mode of the object is smaller
than a word and loads of that size don't sign extend, we can say than a word and loads of that size don't sign extend, we can say
nothing about the high order bits. */ nothing about the high order bits. */
if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP #ifdef LOAD_EXTEND_OP
&& LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif #endif
...@@ -4408,7 +4408,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4408,7 +4408,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
if (target_default_pointer_address_modes_p () if (target_default_pointer_address_modes_p ()
&& ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
&& mode == Pmode && REG_POINTER (x)) && mode == Pmode && REG_POINTER (x))
return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1; return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
#endif #endif
{ {
...@@ -4433,7 +4433,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4433,7 +4433,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
/* Some RISC machines sign-extend all loads of smaller than a word. */ /* Some RISC machines sign-extend all loads of smaller than a word. */
if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
return MAX (1, ((int) bitwidth return MAX (1, ((int) bitwidth
- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1)); - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
#endif #endif
break; break;
...@@ -4457,17 +4457,17 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4457,17 +4457,17 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode, num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
known_x, known_mode, known_ret); known_x, known_mode, known_ret);
return MAX ((int) bitwidth return MAX ((int) bitwidth
- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1, - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
num0); num0);
} }
/* For a smaller object, just ignore the high bits. */ /* For a smaller object, just ignore the high bits. */
if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))) if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
{ {
num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode, num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
known_x, known_mode, known_ret); known_x, known_mode, known_ret);
return MAX (1, (num0 return MAX (1, (num0
- (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
- bitwidth))); - bitwidth)));
} }
...@@ -4498,7 +4498,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4498,7 +4498,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
break; break;
case SIGN_EXTEND: case SIGN_EXTEND:
return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
+ cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
known_x, known_mode, known_ret)); known_x, known_mode, known_ret));
...@@ -4506,7 +4506,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4506,7 +4506,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
/* For a smaller object, just ignore the high bits. */ /* For a smaller object, just ignore the high bits. */
num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
known_x, known_mode, known_ret); known_x, known_mode, known_ret);
return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
- bitwidth))); - bitwidth)));
case NOT: case NOT:
...@@ -4683,7 +4683,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4683,7 +4683,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
known_x, known_mode, known_ret); known_x, known_mode, known_ret);
if (CONST_INT_P (XEXP (x, 1)) if (CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) > 0 && INTVAL (XEXP (x, 1)) > 0
&& INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x))) && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1))); num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
return num0; return num0;
...@@ -4693,7 +4693,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4693,7 +4693,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
if (!CONST_INT_P (XEXP (x, 1)) if (!CONST_INT_P (XEXP (x, 1))
|| INTVAL (XEXP (x, 1)) < 0 || INTVAL (XEXP (x, 1)) < 0
|| INTVAL (XEXP (x, 1)) >= (int) bitwidth || INTVAL (XEXP (x, 1)) >= (int) bitwidth
|| INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x))) || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
return 1; return 1;
num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
...@@ -4729,7 +4729,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4729,7 +4729,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
count those bits and return one less than that amount. If we can't count those bits and return one less than that amount. If we can't
safely compute the mask for this mode, always return BITWIDTH. */ safely compute the mask for this mode, always return BITWIDTH. */
bitwidth = GET_MODE_BITSIZE (mode); bitwidth = GET_MODE_PRECISION (mode);
if (bitwidth > HOST_BITS_PER_WIDE_INT) if (bitwidth > HOST_BITS_PER_WIDE_INT)
return 1; return 1;
...@@ -4998,7 +4998,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest, ...@@ -4998,7 +4998,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
&& CONST_INT_P (op1) && CONST_INT_P (op1)
&& GET_MODE (op0) != VOIDmode && GET_MODE (op0) != VOIDmode
&& GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
{ {
HOST_WIDE_INT const_val = INTVAL (op1); HOST_WIDE_INT const_val = INTVAL (op1);
unsigned HOST_WIDE_INT uconst_val = const_val; unsigned HOST_WIDE_INT uconst_val = const_val;
...@@ -5017,7 +5017,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest, ...@@ -5017,7 +5017,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
case GE: case GE:
if ((const_val & max_val) if ((const_val & max_val)
!= ((unsigned HOST_WIDE_INT) 1 != ((unsigned HOST_WIDE_INT) 1
<< (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))) << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0)); code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
break; break;
...@@ -5123,7 +5123,7 @@ init_num_sign_bit_copies_in_rep (void) ...@@ -5123,7 +5123,7 @@ init_num_sign_bit_copies_in_rep (void)
have to be sign-bit copies too. */ have to be sign-bit copies too. */
|| num_sign_bit_copies_in_rep [in_mode][mode]) || num_sign_bit_copies_in_rep [in_mode][mode])
num_sign_bit_copies_in_rep [in_mode][mode] num_sign_bit_copies_in_rep [in_mode][mode]
+= GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i); += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
} }
} }
} }
...@@ -5183,7 +5183,7 @@ low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m) ...@@ -5183,7 +5183,7 @@ low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
{ {
if (mode != VOIDmode) if (mode != VOIDmode)
{ {
if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT) if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
return -1; return -1;
m &= GET_MODE_MASK (mode); m &= GET_MODE_MASK (mode);
} }
......
...@@ -649,7 +649,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -649,7 +649,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
if (STORE_FLAG_VALUE == -1 if (STORE_FLAG_VALUE == -1
&& GET_CODE (op) == ASHIFTRT && GET_CODE (op) == ASHIFTRT
&& GET_CODE (XEXP (op, 1)) && GET_CODE (XEXP (op, 1))
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1) && INTVAL (XEXP (op, 1)) == GET_MODE_PRECISION (mode) - 1)
return simplify_gen_relational (GE, mode, VOIDmode, return simplify_gen_relational (GE, mode, VOIDmode,
XEXP (op, 0), const0_rtx); XEXP (op, 0), const0_rtx);
...@@ -765,7 +765,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -765,7 +765,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
C is equal to the width of MODE minus 1. */ C is equal to the width of MODE minus 1. */
if (GET_CODE (op) == ASHIFTRT if (GET_CODE (op) == ASHIFTRT
&& CONST_INT_P (XEXP (op, 1)) && CONST_INT_P (XEXP (op, 1))
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1) && INTVAL (XEXP (op, 1)) == GET_MODE_PRECISION (mode) - 1)
return simplify_gen_binary (LSHIFTRT, mode, return simplify_gen_binary (LSHIFTRT, mode,
XEXP (op, 0), XEXP (op, 1)); XEXP (op, 0), XEXP (op, 1));
...@@ -773,7 +773,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -773,7 +773,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
C is equal to the width of MODE minus 1. */ C is equal to the width of MODE minus 1. */
if (GET_CODE (op) == LSHIFTRT if (GET_CODE (op) == LSHIFTRT
&& CONST_INT_P (XEXP (op, 1)) && CONST_INT_P (XEXP (op, 1))
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1) && INTVAL (XEXP (op, 1)) == GET_MODE_PRECISION (mode) - 1)
return simplify_gen_binary (ASHIFTRT, mode, return simplify_gen_binary (ASHIFTRT, mode,
XEXP (op, 0), XEXP (op, 1)); XEXP (op, 0), XEXP (op, 1));
...@@ -790,14 +790,14 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -790,14 +790,14 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
&& SCALAR_INT_MODE_P (GET_MODE (XEXP (op, 0)))) && SCALAR_INT_MODE_P (GET_MODE (XEXP (op, 0))))
{ {
enum machine_mode inner = GET_MODE (XEXP (op, 0)); enum machine_mode inner = GET_MODE (XEXP (op, 0));
int isize = GET_MODE_BITSIZE (inner); int isize = GET_MODE_PRECISION (inner);
if (STORE_FLAG_VALUE == 1) if (STORE_FLAG_VALUE == 1)
{ {
temp = simplify_gen_binary (ASHIFTRT, inner, XEXP (op, 0), temp = simplify_gen_binary (ASHIFTRT, inner, XEXP (op, 0),
GEN_INT (isize - 1)); GEN_INT (isize - 1));
if (mode == inner) if (mode == inner)
return temp; return temp;
if (GET_MODE_BITSIZE (mode) > isize) if (GET_MODE_PRECISION (mode) > isize)
return simplify_gen_unary (SIGN_EXTEND, mode, temp, inner); return simplify_gen_unary (SIGN_EXTEND, mode, temp, inner);
return simplify_gen_unary (TRUNCATE, mode, temp, inner); return simplify_gen_unary (TRUNCATE, mode, temp, inner);
} }
...@@ -807,7 +807,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -807,7 +807,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
GEN_INT (isize - 1)); GEN_INT (isize - 1));
if (mode == inner) if (mode == inner)
return temp; return temp;
if (GET_MODE_BITSIZE (mode) > isize) if (GET_MODE_PRECISION (mode) > isize)
return simplify_gen_unary (ZERO_EXTEND, mode, temp, inner); return simplify_gen_unary (ZERO_EXTEND, mode, temp, inner);
return simplify_gen_unary (TRUNCATE, mode, temp, inner); return simplify_gen_unary (TRUNCATE, mode, temp, inner);
} }
...@@ -854,8 +854,8 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -854,8 +854,8 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
patterns. */ patterns. */
if ((TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (op)) if ((TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (op))
? (num_sign_bit_copies (op, GET_MODE (op)) ? (num_sign_bit_copies (op, GET_MODE (op))
> (unsigned int) (GET_MODE_BITSIZE (GET_MODE (op)) > (unsigned int) (GET_MODE_PRECISION (GET_MODE (op))
- GET_MODE_BITSIZE (mode))) - GET_MODE_PRECISION (mode)))
: truncated_to_mode (mode, op)) : truncated_to_mode (mode, op))
&& ! (GET_CODE (op) == LSHIFTRT && ! (GET_CODE (op) == LSHIFTRT
&& GET_CODE (XEXP (op, 0)) == MULT)) && GET_CODE (XEXP (op, 0)) == MULT))
...@@ -904,7 +904,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -904,7 +904,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
&& (flag_unsafe_math_optimizations && (flag_unsafe_math_optimizations
|| (SCALAR_FLOAT_MODE_P (GET_MODE (op)) || (SCALAR_FLOAT_MODE_P (GET_MODE (op))
&& ((unsigned)significand_size (GET_MODE (op)) && ((unsigned)significand_size (GET_MODE (op))
>= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))) >= (GET_MODE_PRECISION (GET_MODE (XEXP (op, 0)))
- num_sign_bit_copies (XEXP (op, 0), - num_sign_bit_copies (XEXP (op, 0),
GET_MODE (XEXP (op, 0)))))))) GET_MODE (XEXP (op, 0))))))))
return simplify_gen_unary (FLOAT, mode, return simplify_gen_unary (FLOAT, mode,
...@@ -941,7 +941,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -941,7 +941,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|| (GET_CODE (op) == FLOAT || (GET_CODE (op) == FLOAT
&& SCALAR_FLOAT_MODE_P (GET_MODE (op)) && SCALAR_FLOAT_MODE_P (GET_MODE (op))
&& ((unsigned)significand_size (GET_MODE (op)) && ((unsigned)significand_size (GET_MODE (op))
>= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))) >= (GET_MODE_PRECISION (GET_MODE (XEXP (op, 0)))
- num_sign_bit_copies (XEXP (op, 0), - num_sign_bit_copies (XEXP (op, 0),
GET_MODE (XEXP (op, 0))))))) GET_MODE (XEXP (op, 0)))))))
return simplify_gen_unary (GET_CODE (op), mode, return simplify_gen_unary (GET_CODE (op), mode,
...@@ -968,7 +968,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) ...@@ -968,7 +968,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
return op; return op;
/* If operand is known to be only -1 or 0, convert ABS to NEG. */ /* If operand is known to be only -1 or 0, convert ABS to NEG. */
if (num_sign_bit_copies (op, mode) == GET_MODE_BITSIZE (mode)) if (num_sign_bit_copies (op, mode) == GET_MODE_PRECISION (mode))
return gen_rtx_NEG (mode, op); return gen_rtx_NEG (mode, op);
break; break;
...@@ -1261,8 +1261,8 @@ rtx ...@@ -1261,8 +1261,8 @@ rtx
simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
rtx op, enum machine_mode op_mode) rtx op, enum machine_mode op_mode)
{ {
unsigned int width = GET_MODE_BITSIZE (mode); unsigned int width = GET_MODE_PRECISION (mode);
unsigned int op_width = GET_MODE_BITSIZE (op_mode); unsigned int op_width = GET_MODE_PRECISION (op_mode);
if (code == VEC_DUPLICATE) if (code == VEC_DUPLICATE)
{ {
...@@ -1362,7 +1362,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1362,7 +1362,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
if (hv < 0) if (hv < 0)
return 0; return 0;
} }
else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2) else if (GET_MODE_PRECISION (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
; ;
else else
hv = 0, lv &= GET_MODE_MASK (op_mode); hv = 0, lv &= GET_MODE_MASK (op_mode);
...@@ -1403,17 +1403,17 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1403,17 +1403,17 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (op_mode, val)) if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (op_mode, val))
; ;
else else
val = GET_MODE_BITSIZE (op_mode) - floor_log2 (arg0) - 1; val = GET_MODE_PRECISION (op_mode) - floor_log2 (arg0) - 1;
break; break;
case CLRSB: case CLRSB:
arg0 &= GET_MODE_MASK (op_mode); arg0 &= GET_MODE_MASK (op_mode);
if (arg0 == 0) if (arg0 == 0)
val = GET_MODE_BITSIZE (op_mode) - 1; val = GET_MODE_PRECISION (op_mode) - 1;
else if (arg0 >= 0) else if (arg0 >= 0)
val = GET_MODE_BITSIZE (op_mode) - floor_log2 (arg0) - 2; val = GET_MODE_PRECISION (op_mode) - floor_log2 (arg0) - 2;
else if (arg0 < 0) else if (arg0 < 0)
val = GET_MODE_BITSIZE (op_mode) - floor_log2 (~arg0) - 2; val = GET_MODE_PRECISION (op_mode) - floor_log2 (~arg0) - 2;
break; break;
case CTZ: case CTZ:
...@@ -1423,7 +1423,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1423,7 +1423,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
/* Even if the value at zero is undefined, we have to come /* Even if the value at zero is undefined, we have to come
up with some replacement. Seems good enough. */ up with some replacement. Seems good enough. */
if (! CTZ_DEFINED_VALUE_AT_ZERO (op_mode, val)) if (! CTZ_DEFINED_VALUE_AT_ZERO (op_mode, val))
val = GET_MODE_BITSIZE (op_mode); val = GET_MODE_PRECISION (op_mode);
} }
else else
val = ctz_hwi (arg0); val = ctz_hwi (arg0);
...@@ -1467,12 +1467,12 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1467,12 +1467,12 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
/* When zero-extending a CONST_INT, we need to know its /* When zero-extending a CONST_INT, we need to know its
original mode. */ original mode. */
gcc_assert (op_mode != VOIDmode); gcc_assert (op_mode != VOIDmode);
if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT) if (op_width == HOST_BITS_PER_WIDE_INT)
{ {
/* If we were really extending the mode, /* If we were really extending the mode,
we would have to distinguish between zero-extension we would have to distinguish between zero-extension
and sign-extension. */ and sign-extension. */
gcc_assert (width == GET_MODE_BITSIZE (op_mode)); gcc_assert (width == op_width);
val = arg0; val = arg0;
} }
else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT) else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
...@@ -1484,15 +1484,16 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1484,15 +1484,16 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
case SIGN_EXTEND: case SIGN_EXTEND:
if (op_mode == VOIDmode) if (op_mode == VOIDmode)
op_mode = mode; op_mode = mode;
if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT) op_width = GET_MODE_PRECISION (op_mode);
if (op_width == HOST_BITS_PER_WIDE_INT)
{ {
/* If we were really extending the mode, /* If we were really extending the mode,
we would have to distinguish between zero-extension we would have to distinguish between zero-extension
and sign-extension. */ and sign-extension. */
gcc_assert (width == GET_MODE_BITSIZE (op_mode)); gcc_assert (width == op_width);
val = arg0; val = arg0;
} }
else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT) else if (op_width < HOST_BITS_PER_WIDE_INT)
{ {
val = arg0 & GET_MODE_MASK (op_mode); val = arg0 & GET_MODE_MASK (op_mode);
if (val_signbit_known_set_p (op_mode, val)) if (val_signbit_known_set_p (op_mode, val))
...@@ -1565,12 +1566,12 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1565,12 +1566,12 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
case CLZ: case CLZ:
hv = 0; hv = 0;
if (h1 != 0) if (h1 != 0)
lv = GET_MODE_BITSIZE (mode) - floor_log2 (h1) - 1 lv = GET_MODE_PRECISION (mode) - floor_log2 (h1) - 1
- HOST_BITS_PER_WIDE_INT; - HOST_BITS_PER_WIDE_INT;
else if (l1 != 0) else if (l1 != 0)
lv = GET_MODE_BITSIZE (mode) - floor_log2 (l1) - 1; lv = GET_MODE_PRECISION (mode) - floor_log2 (l1) - 1;
else if (! CLZ_DEFINED_VALUE_AT_ZERO (mode, lv)) else if (! CLZ_DEFINED_VALUE_AT_ZERO (mode, lv))
lv = GET_MODE_BITSIZE (mode); lv = GET_MODE_PRECISION (mode);
break; break;
case CTZ: case CTZ:
...@@ -1580,7 +1581,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1580,7 +1581,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
else if (h1 != 0) else if (h1 != 0)
lv = HOST_BITS_PER_WIDE_INT + ctz_hwi (h1); lv = HOST_BITS_PER_WIDE_INT + ctz_hwi (h1);
else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv)) else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv))
lv = GET_MODE_BITSIZE (mode); lv = GET_MODE_PRECISION (mode);
break; break;
case POPCOUNT: case POPCOUNT:
...@@ -1634,7 +1635,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1634,7 +1635,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
case ZERO_EXTEND: case ZERO_EXTEND:
gcc_assert (op_mode != VOIDmode); gcc_assert (op_mode != VOIDmode);
if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT) if (op_width > HOST_BITS_PER_WIDE_INT)
return 0; return 0;
hv = 0; hv = 0;
...@@ -1643,7 +1644,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -1643,7 +1644,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
case SIGN_EXTEND: case SIGN_EXTEND:
if (op_mode == VOIDmode if (op_mode == VOIDmode
|| GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT) || op_width > HOST_BITS_PER_WIDE_INT)
return 0; return 0;
else else
{ {
...@@ -1920,7 +1921,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode, ...@@ -1920,7 +1921,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
{ {
rtx tem, reversed, opleft, opright; rtx tem, reversed, opleft, opright;
HOST_WIDE_INT val; HOST_WIDE_INT val;
unsigned int width = GET_MODE_BITSIZE (mode); unsigned int width = GET_MODE_PRECISION (mode);
/* Even if we can't compute a constant result, /* Even if we can't compute a constant result,
there are some cases worth simplifying. */ there are some cases worth simplifying. */
...@@ -2505,7 +2506,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode, ...@@ -2505,7 +2506,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
&& CONST_INT_P (XEXP (opleft, 1)) && CONST_INT_P (XEXP (opleft, 1))
&& CONST_INT_P (XEXP (opright, 1)) && CONST_INT_P (XEXP (opright, 1))
&& (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1)) && (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1))
== GET_MODE_BITSIZE (mode))) == GET_MODE_PRECISION (mode)))
return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1)); return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1));
/* Same, but for ashift that has been "simplified" to a wider mode /* Same, but for ashift that has been "simplified" to a wider mode
...@@ -2524,7 +2525,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode, ...@@ -2524,7 +2525,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
&& CONST_INT_P (XEXP (SUBREG_REG (opleft), 1)) && CONST_INT_P (XEXP (SUBREG_REG (opleft), 1))
&& CONST_INT_P (XEXP (opright, 1)) && CONST_INT_P (XEXP (opright, 1))
&& (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1)) && (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
== GET_MODE_BITSIZE (mode))) == GET_MODE_PRECISION (mode)))
return gen_rtx_ROTATE (mode, XEXP (opright, 0), return gen_rtx_ROTATE (mode, XEXP (opright, 0),
XEXP (SUBREG_REG (opleft), 1)); XEXP (SUBREG_REG (opleft), 1));
...@@ -2702,7 +2703,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode, ...@@ -2702,7 +2703,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
&& trueop1 == const1_rtx && trueop1 == const1_rtx
&& GET_CODE (op0) == LSHIFTRT && GET_CODE (op0) == LSHIFTRT
&& CONST_INT_P (XEXP (op0, 1)) && CONST_INT_P (XEXP (op0, 1))
&& INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1) && INTVAL (XEXP (op0, 1)) == GET_MODE_PRECISION (mode) - 1)
return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx); return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
/* (xor (comparison foo bar) (const_int sign-bit)) /* (xor (comparison foo bar) (const_int sign-bit))
...@@ -3061,7 +3062,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode, ...@@ -3061,7 +3062,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
unsigned HOST_WIDE_INT zero_val = 0; unsigned HOST_WIDE_INT zero_val = 0;
if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val) if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val)
&& zero_val == GET_MODE_BITSIZE (imode) && zero_val == GET_MODE_PRECISION (imode)
&& INTVAL (trueop1) == exact_log2 (zero_val)) && INTVAL (trueop1) == exact_log2 (zero_val))
return simplify_gen_relational (EQ, mode, imode, return simplify_gen_relational (EQ, mode, imode,
XEXP (op0, 0), const0_rtx); XEXP (op0, 0), const0_rtx);
...@@ -3351,7 +3352,7 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -3351,7 +3352,7 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
{ {
HOST_WIDE_INT arg0, arg1, arg0s, arg1s; HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
HOST_WIDE_INT val; HOST_WIDE_INT val;
unsigned int width = GET_MODE_BITSIZE (mode); unsigned int width = GET_MODE_PRECISION (mode);
if (VECTOR_MODE_P (mode) if (VECTOR_MODE_P (mode)
&& code != VEC_CONCAT && code != VEC_CONCAT
...@@ -3636,24 +3637,24 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -3636,24 +3637,24 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
unsigned HOST_WIDE_INT cnt; unsigned HOST_WIDE_INT cnt;
if (SHIFT_COUNT_TRUNCATED) if (SHIFT_COUNT_TRUNCATED)
o1 = double_int_zext (o1, GET_MODE_BITSIZE (mode)); o1 = double_int_zext (o1, GET_MODE_PRECISION (mode));
if (!double_int_fits_in_uhwi_p (o1) if (!double_int_fits_in_uhwi_p (o1)
|| double_int_to_uhwi (o1) >= GET_MODE_BITSIZE (mode)) || double_int_to_uhwi (o1) >= GET_MODE_PRECISION (mode))
return 0; return 0;
cnt = double_int_to_uhwi (o1); cnt = double_int_to_uhwi (o1);
if (code == LSHIFTRT || code == ASHIFTRT) if (code == LSHIFTRT || code == ASHIFTRT)
res = double_int_rshift (o0, cnt, GET_MODE_BITSIZE (mode), res = double_int_rshift (o0, cnt, GET_MODE_PRECISION (mode),
code == ASHIFTRT); code == ASHIFTRT);
else if (code == ASHIFT) else if (code == ASHIFT)
res = double_int_lshift (o0, cnt, GET_MODE_BITSIZE (mode), res = double_int_lshift (o0, cnt, GET_MODE_PRECISION (mode),
true); true);
else if (code == ROTATE) else if (code == ROTATE)
res = double_int_lrotate (o0, cnt, GET_MODE_BITSIZE (mode)); res = double_int_lrotate (o0, cnt, GET_MODE_PRECISION (mode));
else /* code == ROTATERT */ else /* code == ROTATERT */
res = double_int_rrotate (o0, cnt, GET_MODE_BITSIZE (mode)); res = double_int_rrotate (o0, cnt, GET_MODE_PRECISION (mode));
} }
break; break;
...@@ -4626,7 +4627,7 @@ simplify_const_relational_operation (enum rtx_code code, ...@@ -4626,7 +4627,7 @@ simplify_const_relational_operation (enum rtx_code code,
&& (GET_CODE (trueop1) == CONST_DOUBLE && (GET_CODE (trueop1) == CONST_DOUBLE
|| CONST_INT_P (trueop1))) || CONST_INT_P (trueop1)))
{ {
int width = GET_MODE_BITSIZE (mode); int width = GET_MODE_PRECISION (mode);
HOST_WIDE_INT l0s, h0s, l1s, h1s; HOST_WIDE_INT l0s, h0s, l1s, h1s;
unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u; unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
...@@ -4814,7 +4815,7 @@ simplify_const_relational_operation (enum rtx_code code, ...@@ -4814,7 +4815,7 @@ simplify_const_relational_operation (enum rtx_code code,
rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1)); rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
if (CONST_INT_P (inner_const) && inner_const != const0_rtx) if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
{ {
int sign_bitnum = GET_MODE_BITSIZE (mode) - 1; int sign_bitnum = GET_MODE_PRECISION (mode) - 1;
int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
&& (UINTVAL (inner_const) && (UINTVAL (inner_const)
& ((unsigned HOST_WIDE_INT) 1 & ((unsigned HOST_WIDE_INT) 1
...@@ -4906,7 +4907,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -4906,7 +4907,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
enum machine_mode op0_mode, rtx op0, rtx op1, enum machine_mode op0_mode, rtx op0, rtx op1,
rtx op2) rtx op2)
{ {
unsigned int width = GET_MODE_BITSIZE (mode); unsigned int width = GET_MODE_PRECISION (mode);
bool any_change = false; bool any_change = false;
rtx tem; rtx tem;
...@@ -4951,21 +4952,22 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode, ...@@ -4951,21 +4952,22 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
{ {
/* Extracting a bit-field from a constant */ /* Extracting a bit-field from a constant */
unsigned HOST_WIDE_INT val = UINTVAL (op0); unsigned HOST_WIDE_INT val = UINTVAL (op0);
HOST_WIDE_INT op1val = INTVAL (op1);
HOST_WIDE_INT op2val = INTVAL (op2);
if (BITS_BIG_ENDIAN) if (BITS_BIG_ENDIAN)
val >>= GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1); val >>= GET_MODE_PRECISION (op0_mode) - op2val - op1val;
else else
val >>= INTVAL (op2); val >>= op2val;
if (HOST_BITS_PER_WIDE_INT != INTVAL (op1)) if (HOST_BITS_PER_WIDE_INT != op1val)
{ {
/* First zero-extend. */ /* First zero-extend. */
val &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1; val &= ((unsigned HOST_WIDE_INT) 1 << op1val) - 1;
/* If desired, propagate sign bit. */ /* If desired, propagate sign bit. */
if (code == SIGN_EXTRACT if (code == SIGN_EXTRACT
&& (val & ((unsigned HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))) && (val & ((unsigned HOST_WIDE_INT) 1 << (op1val - 1)))
!= 0) != 0)
val |= ~ (((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1); val |= ~ (((unsigned HOST_WIDE_INT) 1 << op1val) - 1);
} }
return gen_int_mode (val, mode); return gen_int_mode (val, mode);
...@@ -5610,7 +5612,7 @@ simplify_subreg (enum machine_mode outermode, rtx op, ...@@ -5610,7 +5612,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
/* Optimize SUBREG truncations of zero and sign extended values. */ /* Optimize SUBREG truncations of zero and sign extended values. */
if ((GET_CODE (op) == ZERO_EXTEND if ((GET_CODE (op) == ZERO_EXTEND
|| GET_CODE (op) == SIGN_EXTEND) || GET_CODE (op) == SIGN_EXTEND)
&& GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)) && GET_MODE_PRECISION (outermode) < GET_MODE_PRECISION (innermode))
{ {
unsigned int bitpos = subreg_lsb_1 (outermode, innermode, byte); unsigned int bitpos = subreg_lsb_1 (outermode, innermode, byte);
...@@ -5626,7 +5628,7 @@ simplify_subreg (enum machine_mode outermode, rtx op, ...@@ -5626,7 +5628,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
enum machine_mode origmode = GET_MODE (XEXP (op, 0)); enum machine_mode origmode = GET_MODE (XEXP (op, 0));
if (outermode == origmode) if (outermode == origmode)
return XEXP (op, 0); return XEXP (op, 0);
if (GET_MODE_BITSIZE (outermode) <= GET_MODE_BITSIZE (origmode)) if (GET_MODE_PRECISION (outermode) <= GET_MODE_PRECISION (origmode))
return simplify_gen_subreg (outermode, XEXP (op, 0), origmode, return simplify_gen_subreg (outermode, XEXP (op, 0), origmode,
subreg_lowpart_offset (outermode, subreg_lowpart_offset (outermode,
origmode)); origmode));
...@@ -5638,7 +5640,7 @@ simplify_subreg (enum machine_mode outermode, rtx op, ...@@ -5638,7 +5640,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
/* A SUBREG resulting from a zero extension may fold to zero if /* A SUBREG resulting from a zero extension may fold to zero if
it extracts higher bits that the ZERO_EXTEND's source bits. */ it extracts higher bits that the ZERO_EXTEND's source bits. */
if (GET_CODE (op) == ZERO_EXTEND if (GET_CODE (op) == ZERO_EXTEND
&& bitpos >= GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))) && bitpos >= GET_MODE_PRECISION (GET_MODE (XEXP (op, 0))))
return CONST0_RTX (outermode); return CONST0_RTX (outermode);
} }
...@@ -5652,11 +5654,11 @@ simplify_subreg (enum machine_mode outermode, rtx op, ...@@ -5652,11 +5654,11 @@ simplify_subreg (enum machine_mode outermode, rtx op,
to avoid the possibility that an outer LSHIFTRT shifts by more to avoid the possibility that an outer LSHIFTRT shifts by more
than the sign extension's sign_bit_copies and introduces zeros than the sign extension's sign_bit_copies and introduces zeros
into the high bits of the result. */ into the high bits of the result. */
&& (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode) && (2 * GET_MODE_PRECISION (outermode)) <= GET_MODE_PRECISION (innermode)
&& CONST_INT_P (XEXP (op, 1)) && CONST_INT_P (XEXP (op, 1))
&& GET_CODE (XEXP (op, 0)) == SIGN_EXTEND && GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode) && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (outermode)
&& subreg_lsb_1 (outermode, innermode, byte) == 0) && subreg_lsb_1 (outermode, innermode, byte) == 0)
return simplify_gen_binary (ASHIFTRT, outermode, return simplify_gen_binary (ASHIFTRT, outermode,
XEXP (XEXP (op, 0), 0), XEXP (op, 1)); XEXP (XEXP (op, 0), 0), XEXP (op, 1));
...@@ -5667,11 +5669,11 @@ simplify_subreg (enum machine_mode outermode, rtx op, ...@@ -5667,11 +5669,11 @@ simplify_subreg (enum machine_mode outermode, rtx op,
if ((GET_CODE (op) == LSHIFTRT if ((GET_CODE (op) == LSHIFTRT
|| GET_CODE (op) == ASHIFTRT) || GET_CODE (op) == ASHIFTRT)
&& SCALAR_INT_MODE_P (outermode) && SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode) && GET_MODE_PRECISION (outermode) < GET_MODE_PRECISION (innermode)
&& CONST_INT_P (XEXP (op, 1)) && CONST_INT_P (XEXP (op, 1))
&& GET_CODE (XEXP (op, 0)) == ZERO_EXTEND && GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode) && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (outermode)
&& subreg_lsb_1 (outermode, innermode, byte) == 0) && subreg_lsb_1 (outermode, innermode, byte) == 0)
return simplify_gen_binary (LSHIFTRT, outermode, return simplify_gen_binary (LSHIFTRT, outermode,
XEXP (XEXP (op, 0), 0), XEXP (op, 1)); XEXP (XEXP (op, 0), 0), XEXP (op, 1));
...@@ -5681,12 +5683,12 @@ simplify_subreg (enum machine_mode outermode, rtx op, ...@@ -5681,12 +5683,12 @@ simplify_subreg (enum machine_mode outermode, rtx op,
the outer subreg is effectively a truncation to the original mode. */ the outer subreg is effectively a truncation to the original mode. */
if (GET_CODE (op) == ASHIFT if (GET_CODE (op) == ASHIFT
&& SCALAR_INT_MODE_P (outermode) && SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode) && GET_MODE_PRECISION (outermode) < GET_MODE_PRECISION (innermode)
&& CONST_INT_P (XEXP (op, 1)) && CONST_INT_P (XEXP (op, 1))
&& (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND && (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
|| GET_CODE (XEXP (op, 0)) == SIGN_EXTEND) || GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode) && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (outermode)
&& subreg_lsb_1 (outermode, innermode, byte) == 0) && subreg_lsb_1 (outermode, innermode, byte) == 0)
return simplify_gen_binary (ASHIFT, outermode, return simplify_gen_binary (ASHIFT, outermode,
XEXP (XEXP (op, 0), 0), XEXP (op, 1)); XEXP (XEXP (op, 0), 0), XEXP (op, 1));
...@@ -5695,12 +5697,12 @@ simplify_subreg (enum machine_mode outermode, rtx op, ...@@ -5695,12 +5697,12 @@ simplify_subreg (enum machine_mode outermode, rtx op,
if ((GET_CODE (op) == LSHIFTRT if ((GET_CODE (op) == LSHIFTRT
|| GET_CODE (op) == ASHIFTRT) || GET_CODE (op) == ASHIFTRT)
&& SCALAR_INT_MODE_P (outermode) && SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) >= BITS_PER_WORD && GET_MODE_PRECISION (outermode) >= BITS_PER_WORD
&& GET_MODE_BITSIZE (innermode) >= (2 * GET_MODE_BITSIZE (outermode)) && GET_MODE_PRECISION (innermode) >= (2 * GET_MODE_PRECISION (outermode))
&& CONST_INT_P (XEXP (op, 1)) && CONST_INT_P (XEXP (op, 1))
&& (INTVAL (XEXP (op, 1)) & (GET_MODE_BITSIZE (outermode) - 1)) == 0 && (INTVAL (XEXP (op, 1)) & (GET_MODE_PRECISION (outermode) - 1)) == 0
&& INTVAL (XEXP (op, 1)) >= 0 && INTVAL (XEXP (op, 1)) >= 0
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode) && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (innermode)
&& byte == subreg_lowpart_offset (outermode, innermode)) && byte == subreg_lowpart_offset (outermode, innermode))
{ {
int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT; int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment