Commit 39bb8924 by Richard Sandiford, committed by Richard Sandiford

poly_int: get_bit_range

This patch makes get_bit_range return the range and position as poly_ints.

2017-12-21  Richard Sandiford  <richard.sandiford@linaro.org>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

gcc/
	* expr.h (get_bit_range): Return the bitstart and bitend as
	poly_uint64s rather than unsigned HOST_WIDE_INTs.  Return the bitpos
	as a poly_int64 rather than a HOST_WIDE_INT.
	* expr.c (get_bit_range): Likewise.
	(expand_assignment): Update call accordingly.
	* fold-const.c (optimize_bit_field_compare): Likewise.

Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>

From-SVN: r255912
parent a97d8b98
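
[Note on the poly_int changes below: after this series, quantities such as bit sizes and positions are polynomials a0 + a1*x in a runtime parameter x (for example the SVE vector length), so two values are often incomparable at compile time and plain comparisons are replaced by "known" (must) and "maybe" (may) predicates.  The following is a minimal, self-contained sketch of that idea with a fixed two-coefficient value; it is an illustration only, not GCC's poly_int.h, which uses templated coefficient counts and far richer operations.]

    // Minimal sketch (not GCC's implementation) of a value a0 + a1*x,
    // where x is a runtime parameter known only to be >= 0.
    #include <cassert>
    #include <cstdint>

    struct toy_poly
    {
      int64_t a0, a1;
    };

    // "Must" predicate: lhs >= rhs for every possible x >= 0.
    static bool known_ge (toy_poly lhs, toy_poly rhs)
    {
      return lhs.a0 >= rhs.a0 && lhs.a1 >= rhs.a1;
    }

    // "May" predicate: lhs > rhs for at least one x >= 0.
    static bool maybe_gt (toy_poly lhs, toy_poly rhs)
    {
      return lhs.a0 > rhs.a0 || lhs.a1 > rhs.a1;
    }

    int main ()
    {
      toy_poly bitpos = { 8, 16 };    // 8 + 16x bits
      toy_poly bitoffset = { 16, 0 }; // a constant 16 bits
      // Neither value dominates: at x = 0 bitoffset is larger, while for
      // x >= 1 bitpos is.  Code like get_bit_range must therefore ask
      // may/must questions instead of using a plain '>'.
      assert (maybe_gt (bitoffset, bitpos));
      assert (!known_ge (bitpos, bitoffset));
      return 0;
    }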
gcc/ChangeLog
@@ -2,6 +2,17 @@
 	    Alan Hayward  <alan.hayward@arm.com>
 	    David Sherwood  <david.sherwood@arm.com>
 
+	* expr.h (get_bit_range): Return the bitstart and bitend as
+	poly_uint64s rather than unsigned HOST_WIDE_INTs.  Return the bitpos
+	as a poly_int64 rather than a HOST_WIDE_INT.
+	* expr.c (get_bit_range): Likewise.
+	(expand_assignment): Update call accordingly.
+	* fold-const.c (optimize_bit_field_compare): Likewise.
+
+2017-12-21  Richard Sandiford  <richard.sandiford@linaro.org>
+	    Alan Hayward  <alan.hayward@arm.com>
+	    David Sherwood  <david.sherwood@arm.com>
+
 	* config/aarch64/aarch64-protos.h (aarch64_addr_query_type): New enum.
 	(aarch64_legitimate_address_p): Use it instead of an rtx code,
 	as an optional final parameter.
gcc/expr.c
@@ -4809,13 +4809,10 @@ optimize_bitfield_assignment_op (poly_uint64 pbitsize,
    *BITSTART and *BITEND.  */
 
 void
-get_bit_range (unsigned HOST_WIDE_INT *bitstart,
-	       unsigned HOST_WIDE_INT *bitend,
-	       tree exp,
-	       HOST_WIDE_INT *bitpos,
-	       tree *offset)
+get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
+	       poly_int64_pod *bitpos, tree *offset)
 {
-  HOST_WIDE_INT bitoffset;
+  poly_int64 bitoffset;
   tree field, repr;
 
   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
@@ -4836,13 +4833,13 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
   if (handled_component_p (TREE_OPERAND (exp, 0)))
     {
       machine_mode rmode;
-      HOST_WIDE_INT rbitsize, rbitpos;
+      poly_int64 rbitsize, rbitpos;
       tree roffset;
       int unsignedp, reversep, volatilep = 0;
       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &reversep,
			   &volatilep);
-      if ((rbitpos % BITS_PER_UNIT) != 0)
+      if (!multiple_p (rbitpos, BITS_PER_UNIT))
	{
	  *bitstart = *bitend = 0;
	  return;
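
[The alignment test changes from (rbitpos % BITS_PER_UNIT) != 0 to !multiple_p (rbitpos, BITS_PER_UNIT) because a polynomial value is only *known* to be a multiple of N when every coefficient is.  A rough sketch of that semantics under the same toy two-coefficient model (an assumption mirroring poly_int's documented behaviour, not GCC's code):]

    #include <cassert>
    #include <cstdint>

    struct toy_poly { int64_t a0, a1; };

    // a0 + a1*x is a multiple of n for every x only if both
    // coefficients are multiples of n.
    static bool multiple_p (toy_poly p, int64_t n)
    {
      return p.a0 % n == 0 && p.a1 % n == 0;
    }

    int main ()
    {
      assert (multiple_p ({ 16, 8 }, 8));   // 16 + 8x: byte-aligned for all x
      assert (!multiple_p ({ 16, 4 }, 8));  // 16 + 4x: unaligned for odd x
      return 0;
    }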
@@ -4853,10 +4850,10 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
      relative to the representative.  DECL_FIELD_OFFSET of field and
      repr are the same by construction if they are not constants,
      see finish_bitfield_layout.  */
-  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
-      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
-    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
-		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
+  poly_uint64 field_offset, repr_offset;
+  if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
+      && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
+    bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
   else
     bitoffset = 0;
   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
@@ -4865,17 +4862,16 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
   /* If the adjustment is larger than bitpos, we would have a negative bit
      position for the lower bound and this may wreak havoc later.  Adjust
      offset and bitpos to make the lower bound non-negative in that case.  */
-  if (bitoffset > *bitpos)
+  if (maybe_gt (bitoffset, *bitpos))
     {
-      HOST_WIDE_INT adjust = bitoffset - *bitpos;
-      gcc_assert ((adjust % BITS_PER_UNIT) == 0);
+      poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
+      poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);
 
-      *bitpos += adjust;
+      *bitpos += adjust_bits;
       if (*offset == NULL_TREE)
-	*offset = size_int (-adjust / BITS_PER_UNIT);
+	*offset = size_int (-adjust_bytes);
       else
-	*offset
-	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
+	*offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
       *bitstart = 0;
     }
   else
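
[In the old code bitoffset > *bitpos was decidable, so adjust = bitoffset - *bitpos was known non-negative.  With poly_ints the condition may hold for only some runtime values of x, so the adjustment becomes upper_bound (bitoffset, *bitpos) - *bitpos: non-negative for every x, and zero wherever the bound already held.  exact_div then replaces the old gcc_assert on byte divisibility.  A toy illustration, assuming (as poly_int's documentation describes) that upper_bound is a coefficient-wise maximum:]

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    struct toy_poly { int64_t a0, a1; };

    // Coefficient-wise max: an upper bound on both inputs for all x >= 0.
    static toy_poly upper_bound (toy_poly a, toy_poly b)
    {
      return { std::max (a.a0, b.a0), std::max (a.a1, b.a1) };
    }

    int main ()
    {
      toy_poly bitoffset = { 16, 0 };
      toy_poly bitpos = { 8, 16 };
      toy_poly ub = upper_bound (bitoffset, bitpos);              // { 16, 16 }
      toy_poly adjust = { ub.a0 - bitpos.a0, ub.a1 - bitpos.a1 }; // { 8, 0 }
      // bitpos + adjust = { 16, 16 } >= bitoffset for every x >= 0,
      // so the lower bound can no longer go negative.
      assert (adjust.a0 >= 0 && adjust.a1 >= 0);
      return 0;
    }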
@@ -4988,9 +4984,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
     {
       machine_mode mode1;
-      HOST_WIDE_INT bitsize, bitpos;
-      unsigned HOST_WIDE_INT bitregion_start = 0;
-      unsigned HOST_WIDE_INT bitregion_end = 0;
+      poly_int64 bitsize, bitpos;
+      poly_uint64 bitregion_start = 0;
+      poly_uint64 bitregion_end = 0;
       tree offset;
       int unsignedp, reversep, volatilep = 0;
       tree tem;
@@ -5000,11 +4996,11 @@ expand_assignment (tree to, tree from, bool nontemporal)
				 &unsignedp, &reversep, &volatilep);
 
       /* Make sure bitpos is not negative, it can wreak havoc later.  */
-      if (bitpos < 0)
+      if (maybe_lt (bitpos, 0))
	{
	  gcc_assert (offset == NULL_TREE);
-	  offset = size_int (bitpos >> LOG2_BITS_PER_UNIT);
-	  bitpos &= BITS_PER_UNIT - 1;
+	  offset = size_int (bits_to_bytes_round_down (bitpos));
+	  bitpos = num_trailing_bits (bitpos);
	}
 
       if (TREE_CODE (to) == COMPONENT_REF
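
[bits_to_bytes_round_down and num_trailing_bits perform, per coefficient, the same >> LOG2_BITS_PER_UNIT and & (BITS_PER_UNIT - 1) split as the old code: rounding toward negative infinity keeps the trailing-bit count non-negative, so the two halves recombine to the original position.  A scalar-only sketch of that arithmetic, assuming BITS_PER_UNIT == 8:]

    #include <cassert>
    #include <cstdint>

    const int64_t BITS_PER_UNIT = 8;

    static int64_t bits_to_bytes_round_down (int64_t bits)
    {
      return bits >> 3;   // arithmetic shift: floor division by 8
    }

    static int64_t num_trailing_bits (int64_t bits)
    {
      return bits & (BITS_PER_UNIT - 1);   // remainder in [0, 8)
    }

    int main ()
    {
      int64_t bitpos = -5;
      assert (bits_to_bytes_round_down (bitpos) == -1);
      assert (num_trailing_bits (bitpos) == 3);
      // The halves recombine to the original position: -1 * 8 + 3 == -5.
      assert (bits_to_bytes_round_down (bitpos) * BITS_PER_UNIT
	      + num_trailing_bits (bitpos) == bitpos);
      return 0;
    }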
@@ -5014,9 +5010,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
	 BITSIZE are not byte-aligned, there is no need to limit the range
	 we can access.  This can occur with packed structures in Ada.  */
-      else if (bitsize > 0
-	       && bitsize % BITS_PER_UNIT == 0
-	       && bitpos % BITS_PER_UNIT == 0)
+      else if (maybe_gt (bitsize, 0)
+	       && multiple_p (bitsize, BITS_PER_UNIT)
+	       && multiple_p (bitpos, BITS_PER_UNIT))
	{
	  bitregion_start = bitpos;
	  bitregion_end = bitpos + bitsize - 1;
@@ -5078,16 +5074,18 @@ expand_assignment (tree to, tree from, bool nontemporal)
	     This is only done for aligned data values, as these can
	     be expected to result in single move instructions.  */
 
+	  poly_int64 bytepos;
	  if (mode1 != VOIDmode
-	      && bitpos != 0
-	      && bitsize > 0
-	      && (bitpos % bitsize) == 0
-	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
+	      && maybe_ne (bitpos, 0)
+	      && maybe_gt (bitsize, 0)
+	      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
+	      && multiple_p (bitpos, bitsize)
+	      && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
	    {
-	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
+	      to_rtx = adjust_address (to_rtx, mode1, bytepos);
	      bitregion_start = 0;
-	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
+	      if (known_ge (bitregion_end, poly_uint64 (bitpos)))
		bitregion_end -= bitpos;
	      bitpos = 0;
	    }
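
[The three-argument multiple_p (bitpos, BITS_PER_UNIT, &bytepos) both tests divisibility and hands back the quotient, which replaces the separate bitpos / BITS_PER_UNIT in the adjust_address call.  A sketch of that overload in the same toy model (an illustration, not GCC's code):]

    #include <cassert>
    #include <cstdint>

    struct toy_poly { int64_t a0, a1; };

    // Returns true and stores p / n in *quotient when p is a multiple
    // of n for every x; leaves *quotient untouched otherwise.
    static bool multiple_p (toy_poly p, int64_t n, toy_poly *quotient)
    {
      if (p.a0 % n != 0 || p.a1 % n != 0)
	return false;
      *quotient = { p.a0 / n, p.a1 / n };
      return true;
    }

    int main ()
    {
      toy_poly bitpos = { 32, 64 };   // 32 + 64x bits
      toy_poly bytepos;
      if (multiple_p (bitpos, 8, &bytepos))
	assert (bytepos.a0 == 4 && bytepos.a1 == 8);
      return 0;
    }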
@@ -5102,8 +5100,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
	     code contains an out-of-bounds access to a small array.  */
	  if (!MEM_P (to_rtx)
	      && GET_MODE (to_rtx) != BLKmode
-	      && (unsigned HOST_WIDE_INT) bitpos
-		 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
+	      && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
	    {
	      expand_normal (from);
	      result = NULL;
@@ -5114,25 +5111,26 @@ expand_assignment (tree to, tree from, bool nontemporal)
	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
	  if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE (to_rtx)
	      && COMPLEX_MODE_P (GET_MODE (to_rtx))
-	      && bitpos == 0
-	      && bitsize == mode_bitsize)
+	      && known_eq (bitpos, 0)
+	      && known_eq (bitsize, mode_bitsize))
	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
-	  else if (bitsize == mode_bitsize / 2
-		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
-	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
-				 nontemporal, reversep);
-	  else if (bitpos + bitsize <= mode_bitsize / 2)
+	  else if (known_eq (bitsize, mode_bitsize / 2)
+		   && (known_eq (bitpos, 0)
+		       || known_eq (bitpos, mode_bitsize / 2)))
+	    result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
+				 false, nontemporal, reversep);
+	  else if (known_le (bitpos + bitsize, mode_bitsize / 2))
	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
-	  else if (bitpos >= mode_bitsize / 2)
+	  else if (known_ge (bitpos, mode_bitsize / 2))
	    result = store_field (XEXP (to_rtx, 1), bitsize,
				  bitpos - mode_bitsize / 2,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
-	  else if (bitpos == 0 && bitsize == mode_bitsize)
+	  else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
	    {
	      result = expand_normal (from);
	      if (GET_CODE (result) == CONCAT)
gcc/expr.h
@@ -240,17 +240,8 @@ extern bool emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
			    int, rtx, int, rtx, rtx, int, rtx, bool);
 
 /* Extract the accessible bit-range from a COMPONENT_REF.  */
-extern void get_bit_range (unsigned HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
-			   tree, HOST_WIDE_INT *, tree *);
-
-/* Temporary.  */
-inline void
-get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
-	       poly_int64_pod *bitpos, tree *offset)
-{
-  get_bit_range (&bitstart->coeffs[0], &bitend->coeffs[0], exp,
-		 &bitpos->coeffs[0], offset);
-}
+extern void get_bit_range (poly_uint64_pod *, poly_uint64_pod *, tree,
+			   poly_int64_pod *, tree *);
 
 /* Expand an assignment that stores the value of FROM into TO.  */
 extern void expand_assignment (tree, tree, bool);
gcc/fold-const.c
@@ -4076,12 +4076,13 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
     }
 
   /* Honor the C++ memory model and mimic what RTL expansion does.  */
-  unsigned HOST_WIDE_INT bitstart = 0;
-  unsigned HOST_WIDE_INT bitend = 0;
+  poly_uint64 bitstart = 0;
+  poly_uint64 bitend = 0;
   if (TREE_CODE (lhs) == COMPONENT_REF)
     {
-      get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
-      if (offset != NULL_TREE)
+      poly_int64 plbitpos;
+      get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
+      if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
	return 0;
     }
 
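
[optimize_bit_field_compare still only handles compile-time-constant bit positions, so this call site introduces a temporary poly_int64 and uses is_constant (&lbitpos) to recover the old HOST_WIDE_INT, bailing out when the position depends on the runtime vector length.  A toy sketch of the idiom (not GCC's poly_int implementation):]

    #include <cassert>
    #include <cstdint>

    struct toy_poly
    {
      int64_t a0, a1;

      // Extract the value as a plain integer only when the coefficient
      // of the runtime term is zero.
      bool is_constant (int64_t *value) const
      {
	if (a1 != 0)
	  return false;   // position varies with the runtime parameter
	*value = a0;
	return true;
      }
    };

    int main ()
    {
      int64_t lbitpos;
      toy_poly constant_pos = { 24, 0 };
      toy_poly variable_pos = { 24, 8 };
      assert (constant_pos.is_constant (&lbitpos) && lbitpos == 24);
      assert (!variable_pos.is_constant (&lbitpos));
      return 0;
    }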