Commit 5511bc5a authored by Bernd Schmidt, committed by Bernd Schmidt

explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION instead of GET_MODE_BITSIZE where appropriate.

explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION instead of GET_MODE_BITSIZE where appropriate.

	* explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION
	instead of GET_MODE_BITSIZE where appropriate.
	* rtlanal.c (subreg_lsb_1, subreg_get_info, nonzero_bits1,
	num_sign_bit_copies1, canonicalize_condition, low_bitmask_len,
	init_num_sign_bit_copies_in_rep): Likewise.
	* cse.c (fold_rtx, cse_insn): Likewise.
	* loop-doloop.c (doloop_modify, doloop_optimize): Likewise.
	* simplify-rtx.c (simplify_unary_operation_1,
	simplify_const_unary_operation, simplify_binary_operation_1,
	simplify_const_binary_operation, simplify_ternary_operation,
	simplify_const_relational_operation, simplify_subreg): Likewise.
	* combine.c (try_combine, find_split_point, combine_simplify_rtx,
	simplify_if_then_else, simplify_set, expand_compound_operation,
	expand_field_assignment, make_extraction, if_then_else_cond,
	make_compound_operation, force_to_mode, make_field_assignment,
	reg_nonzero_bits_for_combine, reg_num_sign_bit_copies_for_combine,
	extended_count, try_widen_shift_mode, simplify_shift_const_1,
	simplify_comparison, record_promoted_value, simplify_compare_const,
	record_dead_and_set_regs_1): Likewise.

From-SVN: r175946
parent 46c9550f
@@ -24,6 +24,26 @@
 	simplify_binary_operation_1, simplify_const_relational_operation):
 	Likewise.
+	* explow.c (trunc_int_for_mode): Use GET_MODE_PRECISION
+	instead of GET_MODE_BITSIZE where appropriate.
+	* rtlanal.c (subreg_lsb_1, subreg_get_info, nonzero_bits1,
+	num_sign_bit_copies1, canonicalize_condition, low_bitmask_len,
+	init_num_sign_bit_copies_in_rep): Likewise.
+	* cse.c (fold_rtx, cse_insn): Likewise.
+	* loop-doloop.c (doloop_modify, doloop_optimize): Likewise.
+	* simplify-rtx.c (simplify_unary_operation_1,
+	simplify_const_unary_operation, simplify_binary_operation_1,
+	simplify_const_binary_operation, simplify_ternary_operation,
+	simplify_const_relational_operation, simplify_subreg): Likewise.
+	* combine.c (try_combine, find_split_point, combine_simplify_rtx,
+	simplify_if_then_else, simplify_set, expand_compound_operation,
+	expand_field_assignment, make_extraction, if_then_else_cond,
+	make_compound_operation, force_to_mode, make_field_assignment,
+	reg_nonzero_bits_for_combine, reg_num_sign_bit_copies_for_combine,
+	extended_count, try_widen_shift_mode, simplify_shift_const_1,
+	simplify_comparison, record_promoted_value, simplify_compare_const,
+	record_dead_and_set_regs_1): Likewise.
 2011-07-06  Michael Meissner  <meissner@linux.vnet.ibm.com>

 	* config/rs6000/rs6000-protos.h (rs6000_call_indirect_aix): New
......
@@ -3650,7 +3650,7 @@ fold_rtx (rtx x, rtx insn)
 	      enum rtx_code associate_code;

 	      if (is_shift
-		  && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
+		  && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
 		      || INTVAL (const_arg1) < 0))
 		{
 		  if (SHIFT_COUNT_TRUNCATED)
@@ -3699,7 +3699,7 @@ fold_rtx (rtx x, rtx insn)
 		break;

 	      if (is_shift
-		  && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
+		  && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
 		      || INTVAL (inner_const) < 0))
 		{
 		  if (SHIFT_COUNT_TRUNCATED)
@@ -3729,7 +3729,7 @@ fold_rtx (rtx x, rtx insn)
 	      if (is_shift
 		  && CONST_INT_P (new_const)
-		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
+		  && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
 		{
 		  /* As an exception, we can turn an ASHIFTRT of this
 		     form into a shift of the number of bits - 1.  */
@@ -4672,13 +4672,13 @@ cse_insn (rtx insn)
       if (src_const && src_related == 0 && CONST_INT_P (src_const)
 	  && GET_MODE_CLASS (mode) == MODE_INT
-	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
+	  && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
 	{
 	  enum machine_mode wider_mode;

 	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
 	       wider_mode != VOIDmode
-	       && GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
+	       && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
 	       && src_related == 0;
 	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
 	    {
@@ -5031,7 +5031,7 @@ cse_insn (rtx insn)
 	  && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
 	  && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
 	  && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
-	  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (sets[i].rtl)))
+	  && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
 	      >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
 	  && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
 	      + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
@@ -5058,7 +5058,7 @@ cse_insn (rtx insn)
 	      HOST_WIDE_INT mask;
 	      unsigned int shift;

 	      if (BITS_BIG_ENDIAN)
-		shift = GET_MODE_BITSIZE (GET_MODE (dest_reg))
+		shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
 			- INTVAL (pos) - INTVAL (width);
 	      else
 		shift = INTVAL (pos);
......
@@ -51,7 +51,7 @@ static rtx break_out_memory_refs (rtx);
 HOST_WIDE_INT
 trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
 {
-  int width = GET_MODE_BITSIZE (mode);
+  int width = GET_MODE_PRECISION (mode);

   /* You want to truncate to a _what_?  */
   gcc_assert (SCALAR_INT_MODE_P (mode));
......
@@ -465,7 +465,7 @@ doloop_modify (struct loop *loop, struct niter_desc *desc,
 	 Note that the maximum value loaded is iterations_max - 1.  */
       if (desc->niter_max
 	  <= ((unsigned HOST_WIDEST_INT) 1
-	      << (GET_MODE_BITSIZE (mode) - 1)))
+	      << (GET_MODE_PRECISION (mode) - 1)))
 	nonneg = 1;
       break;
@@ -677,7 +677,7 @@ doloop_optimize (struct loop *loop)
   doloop_seq = gen_doloop_end (doloop_reg, iterations, iterations_max,
 			       GEN_INT (level), start_label);

-  word_mode_size = GET_MODE_BITSIZE (word_mode);
+  word_mode_size = GET_MODE_PRECISION (word_mode);
   word_mode_max
     = ((unsigned HOST_WIDE_INT) 1 << (word_mode_size - 1) << 1) - 1;
   if (! doloop_seq
@@ -685,10 +685,10 @@ doloop_optimize (struct loop *loop)
       /* Before trying mode different from the one in that # of iterations is
 	 computed, we must be sure that the number of iterations fits into
 	 the new mode.  */
-      && (word_mode_size >= GET_MODE_BITSIZE (mode)
+      && (word_mode_size >= GET_MODE_PRECISION (mode)
 	  || desc->niter_max <= word_mode_max))
     {
-      if (word_mode_size > GET_MODE_BITSIZE (mode))
+      if (word_mode_size > GET_MODE_PRECISION (mode))
 	{
 	  zero_extend_p = true;
 	  iterations = simplify_gen_unary (ZERO_EXTEND, word_mode,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment