Commit 97ea7176 by Charles M. Hannum, committed by Jeff Law

fold-const.c (fold_truthop): Mask the lhs and rhs after merging adjacent bitfield references.

        * fold-const.c (fold_truthop): Mask the lhs and rhs after merging
        adjacent bitfield references.

From-SVN: r26004
parent 2d490c9e
@@ -65,6 +65,9 @@ Fri Mar 26 10:43:47 1999  Nick Clifton  <nickc@cygnus.com>

 Fri Mar 26 01:59:15 1999  "Charles M. Hannum" <root@ihack.net>

+	* fold-const.c (fold_truthop): Mask the lhs and rhs after merging
+	adjacent bitfield references.
 	* fold-const.c (fold_truthop): Verify that the lhs and rhs are
 	in the same bit position when optimizing bitfield references
 	which have the same mask.
......
@@ -3956,20 +3956,32 @@ fold_truthop (code, truth_type, lhs, rhs)
   /* There is still another way we can do something:  If both pairs of
      fields being compared are adjacent, we may be able to make a wider
-     field containing them both.  */
+     field containing them both.
+
+     Note that we still must mask the lhs/rhs expressions.  Furthermore,
+     the mask must be shifted to account for the shift done by
+     make_bit_field_ref.  */
   if ((ll_bitsize + ll_bitpos == rl_bitpos
        && lr_bitsize + lr_bitpos == rr_bitpos)
       || (ll_bitpos == rl_bitpos + rl_bitsize
 	  && lr_bitpos == rr_bitpos + rr_bitsize))
-    return build (wanted_code, truth_type,
-		  make_bit_field_ref (ll_inner, type,
-				      ll_bitsize + rl_bitsize,
-				      MIN (ll_bitpos, rl_bitpos),
-				      ll_unsignedp),
-		  make_bit_field_ref (lr_inner, type,
-				      lr_bitsize + rr_bitsize,
-				      MIN (lr_bitpos, rr_bitpos),
-				      lr_unsignedp));
+    {
+      lhs = make_bit_field_ref (ll_inner, type, ll_bitsize + rl_bitsize,
+				MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
+      ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
+			     size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
+      if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
+	lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
+
+      rhs = make_bit_field_ref (lr_inner, type, lr_bitsize + rr_bitsize,
+				MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
+      lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
+			     size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
+      if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
+	rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
+
+      return build (wanted_code, truth_type, lhs, rhs);
+    }
 
   return 0;
 }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment