Commit 7f679e47, authored and committed by Richard Sandiford

poly_int: store_field & co

This patch makes store_field and related routines use poly_ints
for bit positions and sizes.  It keeps the existing choices
between signed and unsigned types (there are a mixture of both).

2017-12-20  Richard Sandiford  <richard.sandiford@linaro.org>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

gcc/
	* expr.c (store_constructor_field): Change bitsize from a
	unsigned HOST_WIDE_INT to a poly_uint64 and bitpos from a
	HOST_WIDE_INT to a poly_int64.
	(store_constructor): Change size from a HOST_WIDE_INT to
	a poly_int64.
	(store_field): Likewise bitsize and bitpos.

Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>

From-SVN: r255880
parent 8c59e5e7
...@@ -2,6 +2,17 @@ ...@@ -2,6 +2,17 @@
Alan Hayward <alan.hayward@arm.com> Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com> David Sherwood <david.sherwood@arm.com>
* expr.c (store_constructor_field): Change bitsize from a
unsigned HOST_WIDE_INT to a poly_uint64 and bitpos from a
HOST_WIDE_INT to a poly_int64.
(store_constructor): Change size from a HOST_WIDE_INT to
a poly_int64.
(store_field): Likewise bitsize and bitpos.
2017-12-20 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
* expmed.h (store_bit_field): Change bitregion_start and * expmed.h (store_bit_field): Change bitregion_start and
bitregion_end from unsigned HOST_WIDE_INT to poly_uint64. bitregion_end from unsigned HOST_WIDE_INT to poly_uint64.
* expmed.c (adjust_bit_field_mem_for_reg, strict_volatile_bitfield_p) * expmed.c (adjust_bit_field_mem_for_reg, strict_volatile_bitfield_p)
...@@ -79,9 +79,8 @@ static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned); ...@@ -79,9 +79,8 @@ static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int); static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx); static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx); static rtx get_subtarget (rtx);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool); static void store_constructor (tree, rtx, int, poly_int64, bool);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
poly_uint64, poly_uint64,
machine_mode, tree, alias_set_type, bool, bool); machine_mode, tree, alias_set_type, bool, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree); static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
...@@ -6103,31 +6102,34 @@ all_zeros_p (const_tree exp) ...@@ -6103,31 +6102,34 @@ all_zeros_p (const_tree exp)
clear a substructure if the outer structure has already been cleared. */ clear a substructure if the outer structure has already been cleared. */
static void static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize, store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
HOST_WIDE_INT bitpos,
poly_uint64 bitregion_start, poly_uint64 bitregion_start,
poly_uint64 bitregion_end, poly_uint64 bitregion_end,
machine_mode mode, machine_mode mode,
tree exp, int cleared, tree exp, int cleared,
alias_set_type alias_set, bool reverse) alias_set_type alias_set, bool reverse)
{ {
poly_int64 bytepos;
poly_uint64 bytesize;
if (TREE_CODE (exp) == CONSTRUCTOR if (TREE_CODE (exp) == CONSTRUCTOR
/* We can only call store_constructor recursively if the size and /* We can only call store_constructor recursively if the size and
bit position are on a byte boundary. */ bit position are on a byte boundary. */
&& bitpos % BITS_PER_UNIT == 0 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
&& (bitsize > 0 && bitsize % BITS_PER_UNIT == 0) && maybe_ne (bitsize, 0U)
&& multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
/* If we have a nonzero bitpos for a register target, then we just /* If we have a nonzero bitpos for a register target, then we just
let store_field do the bitfield handling. This is unlikely to let store_field do the bitfield handling. This is unlikely to
generate unnecessary clear instructions anyways. */ generate unnecessary clear instructions anyways. */
&& (bitpos == 0 || MEM_P (target))) && (known_eq (bitpos, 0) || MEM_P (target)))
{ {
if (MEM_P (target)) if (MEM_P (target))
target {
= adjust_address (target, machine_mode target_mode = GET_MODE (target);
GET_MODE (target) == BLKmode if (target_mode != BLKmode
|| (bitpos && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
% GET_MODE_ALIGNMENT (GET_MODE (target))) != 0 target_mode = BLKmode;
? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT); target = adjust_address (target, target_mode, bytepos);
}
/* Update the alias set, if required. */ /* Update the alias set, if required. */
...@@ -6138,8 +6140,7 @@ store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize, ...@@ -6138,8 +6140,7 @@ store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
set_mem_alias_set (target, alias_set); set_mem_alias_set (target, alias_set);
} }
store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT, store_constructor (exp, target, cleared, bytesize, reverse);
reverse);
} }
else else
store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode, store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
...@@ -6173,12 +6174,12 @@ fields_length (const_tree type) ...@@ -6173,12 +6174,12 @@ fields_length (const_tree type)
If REVERSE is true, the store is to be done in reverse order. */ If REVERSE is true, the store is to be done in reverse order. */
static void static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
bool reverse) bool reverse)
{ {
tree type = TREE_TYPE (exp); tree type = TREE_TYPE (exp);
HOST_WIDE_INT exp_size = int_size_in_bytes (type); HOST_WIDE_INT exp_size = int_size_in_bytes (type);
HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0; poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
switch (TREE_CODE (type)) switch (TREE_CODE (type))
{ {
...@@ -6193,7 +6194,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, ...@@ -6193,7 +6194,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
reverse = TYPE_REVERSE_STORAGE_ORDER (type); reverse = TYPE_REVERSE_STORAGE_ORDER (type);
/* If size is zero or the target is already cleared, do nothing. */ /* If size is zero or the target is already cleared, do nothing. */
if (size == 0 || cleared) if (known_eq (size, 0) || cleared)
cleared = 1; cleared = 1;
/* We either clear the aggregate or indicate the value is dead. */ /* We either clear the aggregate or indicate the value is dead. */
else if ((TREE_CODE (type) == UNION_TYPE else if ((TREE_CODE (type) == UNION_TYPE
...@@ -6222,14 +6223,14 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, ...@@ -6222,14 +6223,14 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
the whole structure first. Don't do this if TARGET is a the whole structure first. Don't do this if TARGET is a
register whose mode size isn't equal to SIZE since register whose mode size isn't equal to SIZE since
clear_storage can't handle this case. */ clear_storage can't handle this case. */
else if (size > 0 else if (known_size_p (size)
&& (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type)) && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
|| mostly_zeros_p (exp)) || mostly_zeros_p (exp))
&& (!REG_P (target) && (!REG_P (target)
|| ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
== size)))
{ {
clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); clear_storage (target, gen_int_mode (size, Pmode),
BLOCK_OP_NORMAL);
cleared = 1; cleared = 1;
} }
...@@ -6410,12 +6411,13 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, ...@@ -6410,12 +6411,13 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
need_to_clear = 1; need_to_clear = 1;
} }
if (need_to_clear && size > 0) if (need_to_clear && maybe_gt (size, 0))
{ {
if (REG_P (target)) if (REG_P (target))
emit_move_insn (target, CONST0_RTX (GET_MODE (target))); emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
else else
clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); clear_storage (target, gen_int_mode (size, Pmode),
BLOCK_OP_NORMAL);
cleared = 1; cleared = 1;
} }
...@@ -6429,7 +6431,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, ...@@ -6429,7 +6431,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value) FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
{ {
machine_mode mode; machine_mode mode;
HOST_WIDE_INT bitsize; poly_int64 bitsize;
HOST_WIDE_INT bitpos; HOST_WIDE_INT bitpos;
rtx xtarget = target; rtx xtarget = target;
...@@ -6522,7 +6524,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, ...@@ -6522,7 +6524,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
xtarget = adjust_address (xtarget, mode, 0); xtarget = adjust_address (xtarget, mode, 0);
if (TREE_CODE (value) == CONSTRUCTOR) if (TREE_CODE (value) == CONSTRUCTOR)
store_constructor (value, xtarget, cleared, store_constructor (value, xtarget, cleared,
bitsize / BITS_PER_UNIT, reverse); exact_div (bitsize, BITS_PER_UNIT),
reverse);
else else
store_expr (value, xtarget, 0, false, reverse); store_expr (value, xtarget, 0, false, reverse);
...@@ -6691,12 +6694,13 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, ...@@ -6691,12 +6694,13 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count); need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
} }
if (need_to_clear && size > 0 && !vector) if (need_to_clear && maybe_gt (size, 0) && !vector)
{ {
if (REG_P (target)) if (REG_P (target))
emit_move_insn (target, CONST0_RTX (mode)); emit_move_insn (target, CONST0_RTX (mode));
else else
clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); clear_storage (target, gen_int_mode (size, Pmode),
BLOCK_OP_NORMAL);
cleared = 1; cleared = 1;
} }
...@@ -6784,7 +6788,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, ...@@ -6784,7 +6788,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
If REVERSE is true, the store is to be done in reverse order. */ If REVERSE is true, the store is to be done in reverse order. */
static rtx static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
poly_uint64 bitregion_start, poly_uint64 bitregion_end, poly_uint64 bitregion_start, poly_uint64 bitregion_end,
machine_mode mode, tree exp, machine_mode mode, tree exp,
alias_set_type alias_set, bool nontemporal, bool reverse) alias_set_type alias_set, bool nontemporal, bool reverse)
...@@ -6795,7 +6799,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6795,7 +6799,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
/* If we have nothing to store, do nothing unless the expression has /* If we have nothing to store, do nothing unless the expression has
side-effects. Don't do that for zero sized addressable lhs of side-effects. Don't do that for zero sized addressable lhs of
calls. */ calls. */
if (bitsize == 0 if (known_eq (bitsize, 0)
&& (!TREE_ADDRESSABLE (TREE_TYPE (exp)) && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
|| TREE_CODE (exp) != CALL_EXPR)) || TREE_CODE (exp) != CALL_EXPR))
return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL); return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
...@@ -6804,7 +6808,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6804,7 +6808,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
{ {
/* We're storing into a struct containing a single __complex. */ /* We're storing into a struct containing a single __complex. */
gcc_assert (!bitpos); gcc_assert (known_eq (bitpos, 0));
return store_expr (exp, target, 0, nontemporal, reverse); return store_expr (exp, target, 0, nontemporal, reverse);
} }
...@@ -6812,6 +6816,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6812,6 +6816,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
is a bit field, we cannot use addressing to access it. is a bit field, we cannot use addressing to access it.
Use bit-field techniques or SUBREG to store in it. */ Use bit-field techniques or SUBREG to store in it. */
poly_int64 decl_bitsize;
if (mode == VOIDmode if (mode == VOIDmode
|| (mode != BLKmode && ! direct_store[(int) mode] || (mode != BLKmode && ! direct_store[(int) mode]
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
...@@ -6822,21 +6827,23 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6822,21 +6827,23 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
store it as a bit field. */ store it as a bit field. */
|| (mode != BLKmode || (mode != BLKmode
&& ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)) && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
|| bitpos % GET_MODE_ALIGNMENT (mode)) || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
&& targetm.slow_unaligned_access (mode, MEM_ALIGN (target))) && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
|| (bitpos % BITS_PER_UNIT != 0))) || !multiple_p (bitpos, BITS_PER_UNIT)))
|| (bitsize >= 0 && mode != BLKmode || (known_size_p (bitsize)
&& GET_MODE_BITSIZE (mode) > bitsize) && mode != BLKmode
&& maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
/* If the RHS and field are a constant size and the size of the /* If the RHS and field are a constant size and the size of the
RHS isn't the same size as the bitfield, we must use bitfield RHS isn't the same size as the bitfield, we must use bitfield
operations. */ operations. */
|| (bitsize >= 0 || (known_size_p (bitsize)
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
&& compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
bitsize)
/* Except for initialization of full bytes from a CONSTRUCTOR, which /* Except for initialization of full bytes from a CONSTRUCTOR, which
we will handle specially below. */ we will handle specially below. */
&& !(TREE_CODE (exp) == CONSTRUCTOR && !(TREE_CODE (exp) == CONSTRUCTOR
&& bitsize % BITS_PER_UNIT == 0) && multiple_p (bitsize, BITS_PER_UNIT))
/* And except for bitwise copying of TREE_ADDRESSABLE types, /* And except for bitwise copying of TREE_ADDRESSABLE types,
where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp) where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
includes some extra padding. store_expr / expand_expr will in includes some extra padding. store_expr / expand_expr will in
...@@ -6847,14 +6854,14 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6847,14 +6854,14 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
get_base_address needs to live in memory. */ get_base_address needs to live in memory. */
&& (!TREE_ADDRESSABLE (TREE_TYPE (exp)) && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
|| TREE_CODE (exp) != COMPONENT_REF || TREE_CODE (exp) != COMPONENT_REF
|| TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST || !multiple_p (bitsize, BITS_PER_UNIT)
|| (bitsize % BITS_PER_UNIT != 0) || !multiple_p (bitpos, BITS_PER_UNIT)
|| (bitpos % BITS_PER_UNIT != 0) || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
|| (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize) &decl_bitsize)
!= 0))) || maybe_ne (decl_bitsize, bitsize)))
/* If we are expanding a MEM_REF of a non-BLKmode non-addressable /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
decl we must use bitfield operations. */ decl we must use bitfield operations. */
|| (bitsize >= 0 || (known_size_p (bitsize)
&& TREE_CODE (exp) == MEM_REF && TREE_CODE (exp) == MEM_REF
&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
&& DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
...@@ -6875,17 +6882,23 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6875,17 +6882,23 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
tree type = TREE_TYPE (exp); tree type = TREE_TYPE (exp);
if (INTEGRAL_TYPE_P (type) if (INTEGRAL_TYPE_P (type)
&& TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type)) && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
&& bitsize == TYPE_PRECISION (type)) && known_eq (bitsize, TYPE_PRECISION (type)))
{ {
tree op = gimple_assign_rhs1 (nop_def); tree op = gimple_assign_rhs1 (nop_def);
type = TREE_TYPE (op); type = TREE_TYPE (op);
if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize) if (INTEGRAL_TYPE_P (type)
&& known_ge (TYPE_PRECISION (type), bitsize))
exp = op; exp = op;
} }
} }
temp = expand_normal (exp); temp = expand_normal (exp);
/* We don't support variable-sized BLKmode bitfields, since our
handling of BLKmode is bound up with the ability to break
things into words. */
gcc_assert (mode != BLKmode || bitsize.is_constant ());
/* Handle calls that return values in multiple non-contiguous locations. /* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */ The Irix 6 ABI has examples of this. */
if (GET_CODE (temp) == PARALLEL) if (GET_CODE (temp) == PARALLEL)
...@@ -6926,9 +6939,11 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6926,9 +6939,11 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
if (reverse) if (reverse)
temp = flip_storage_order (temp_mode, temp); temp = flip_storage_order (temp_mode, temp);
if (bitsize < size gcc_checking_assert (known_le (bitsize, size));
if (maybe_lt (bitsize, size)
&& reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
&& !(mode == BLKmode && bitsize > BITS_PER_WORD)) /* Use of to_constant for BLKmode was checked above. */
&& !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
temp = expand_shift (RSHIFT_EXPR, temp_mode, temp, temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
size - bitsize, NULL_RTX, 1); size - bitsize, NULL_RTX, 1);
} }
...@@ -6945,16 +6960,16 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6945,16 +6960,16 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
&& (GET_MODE (target) == BLKmode && (GET_MODE (target) == BLKmode
|| (MEM_P (target) || (MEM_P (target)
&& GET_MODE_CLASS (GET_MODE (target)) == MODE_INT && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
&& (bitpos % BITS_PER_UNIT) == 0 && multiple_p (bitpos, BITS_PER_UNIT)
&& (bitsize % BITS_PER_UNIT) == 0))) && multiple_p (bitsize, BITS_PER_UNIT))))
{ {
gcc_assert (MEM_P (target) && MEM_P (temp) gcc_assert (MEM_P (target) && MEM_P (temp));
&& (bitpos % BITS_PER_UNIT) == 0); poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); target = adjust_address (target, VOIDmode, bytepos);
emit_block_move (target, temp, emit_block_move (target, temp,
GEN_INT ((bitsize + BITS_PER_UNIT - 1) gen_int_mode (bytesize, Pmode),
/ BITS_PER_UNIT),
BLOCK_OP_NORMAL); BLOCK_OP_NORMAL);
return const0_rtx; return const0_rtx;
...@@ -6962,7 +6977,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6962,7 +6977,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
/* If the mode of TEMP is still BLKmode and BITSIZE not larger than the /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
word size, we need to load the value (see again store_bit_field). */ word size, we need to load the value (see again store_bit_field). */
if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD) if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
{ {
scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize); scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode, temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
...@@ -6979,7 +6994,8 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6979,7 +6994,8 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
else else
{ {
/* Now build a reference to just the desired component. */ /* Now build a reference to just the desired component. */
rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); rtx to_rtx = adjust_address (target, mode,
exact_div (bitpos, BITS_PER_UNIT));
if (to_rtx == target) if (to_rtx == target)
to_rtx = copy_rtx (to_rtx); to_rtx = copy_rtx (to_rtx);
...@@ -6989,10 +7005,10 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -6989,10 +7005,10 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
/* Above we avoided using bitfield operations for storing a CONSTRUCTOR /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
into a target smaller than its type; handle that case now. */ into a target smaller than its type; handle that case now. */
if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0) if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
{ {
gcc_assert (bitsize % BITS_PER_UNIT == 0); poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse); store_constructor (exp, to_rtx, 0, bytesize, reverse);
return to_rtx; return to_rtx;
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment