Commit 19caa751 by Richard Kenner Committed by Richard Kenner

builtins.c (get_pointer_alignment): Use host_integerp & tree_low_cst.

	* builtins.c (get_pointer_alignment): Use host_integerp & tree_low_cst.
	(expand_builtin_apply): Pass alignment to emit_block_move in bits.
	(expand_builtin_memcpy, expand_builtin_va_copy): Likewise.
	(expand_builtin_memset): Likewise, but to clear_storage.
	* calls.c (save_fixed_argument_area): Likewise, to move_by_pieces.
	(restore_fixed_argument_area): Likewise.
	(store_unaligned_arguments_into_pseudos): Likewise, to store_bit_field.
	(load_register_parameters): Likewise, to emit_group_load.
	(expand_call): Likewise, to emit_group_store and emit_block_move.
	(emit_library_call_value_1): Likewise, to emit_block_move.
	(store_one_arg): Likewise, and to emit_push_insn.
	* expmed.c (extract_bit_field): Alignment is in bits, not bytes.
	(extract_fixed_bit_field, extract_split_bit_field): Likewise.
	* expr.c (move_by_pieces, move_by_pieces_ninsns): Likewise.
	(emit_block_move, emit_group_load, emit_group_store): Likewise.
	(clear_by_pieces, clear_storage, emit_push_insn): Likewise.
	(expand_assignment, store_expr, store_constructor_field): Likewise.
	(expand_expr_unaligned, do_jump, do_compare_and_jump): Likewise.
	(store_constructor, store_field, get_inner_reference): Likewise.
	Use host_integerp and tree_low_cst; sizes and positions HOST_WIDE_INT.
	(expand_expr, case COMPONENT_REF): Likewise.
	(copy_blkmode_from_regs): Use UNSIGNED_HOST_WIDE_INT for sizes
	and positions; reindent code.
	* expr.h (emit_cmp_insn, emit_cmp_and_jump_insns): Alignment unsigned.
	* function.c (purge_addressof_1): Pass bit align to store_bit_field.
	(assign_parms): Likewise to emit_group_store.
	* optabs.c (prepare_cmp_insn): Alignment is in bits.
	(emit_cmp_and_jump_insns, emit_cmp_insn): Likewise, and also unsigned.
	* stmt.c (expand_value_return): Pass align in bits to emit_group_load.
	(expand_return): Likewise to {extract,store}_bit_field.
	* stor-layout.c (get_mode_alignment): Minor cleanup.
	* config/rs6000/rs6000.h (SLOW_UNALIGNED_ACCESS): Align is in bits.
	* config/sh/sh.h (MOVE_BY_PIECES_P): Likewise.
	* ch/expr.c (chill_expand_expr): Pass bit alignment to emit_block_move.

From-SVN: r32827
parent 2dc4d9f0
Thu Mar 30 06:32:51 2000 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
* builtins.c (get_pointer_alignment): Use host_integerp & tree_low_cst.
(expand_builtin_apply): Pass alignment to emit_block_move in bits.
(expand_builtin_memcpy, expand_builtin_va_copy): Likewise.
(expand_builtin_memset): Likewise, but to clear_storage.
* calls.c (save_fixed_argument_area): Likewise, to move_by_pieces.
(restore_fixed_argument_area): Likewise.
(store_unaligned_arguments_into_pseudos): Likewise, to store_bit_field.
(load_register_parameters): Likewise, to emit_group_load.
(expand_call): Likewise, to emit_group_store and emit_block_move.
(emit_library_call_value_1): Likewise, to emit_block_move.
(store_one_arg): Likewise, and to emit_push_insn.
* expmed.c (extract_bit_field): Alignment is in bits, not bytes.
(extract_fixed_bit_field, extract_split_bit_field): Likewise.
* expr.c (move_by_pieces, move_by_pieces_ninsns): Likewise.
(emit_block_move, emit_group_load, emit_group_store): Likewise.
(clear_by_pieces, clear_storage, emit_push_insn): Likewise.
(expand_assignment, store_expr, store_constructor_field): Likewise.
(expand_expr_unaligned, do_jump, do_compare_and_jump): Likewise.
(store_constructor, store_field, get_inner_reference): Likewise.
Use host_integerp and tree_low_cst; sizes and positions HOST_WIDE_INT.
(expand_expr, case COMPONENT_REF): Likewise.
(copy_blkmode_from_regs): Use UNSIGNED_HOST_WIDE_INT for sizes
and positions; reindent code.
* expr.h (emit_cmp_insn, emit_cmp_and_jump_insns): Alignment unsigned.
* function.c (purge_addressof_1): Pass bit align to store_bit_field.
(assign_parms): Likewise to emit_group_store.
* optabs.c (prepare_cmp_insn): Alignment is in bits.
(emit_cmp_and_jump_insns, emit_cmp_insn): Likewise, and also unsigned.
* stmt.c (expand_value_return): Pass align in bits to emit_group_load.
(expand_return): Likewise to {extract,store}_bit_field.
* stor-layout.c (get_mode_alignment): Minor cleanup.
* config/rs6000/rs6000.h (SLOW_UNALIGNED_ACCESS): Align is in bits.
* config/sh/sh.h (MOVE_BY_PIECES_P): Likewise.
2000-03-29 Zack Weinberg <zack@wolery.cumb.org>
* cppinit.c (cpp_start_read): Call initialize_dependency_output
......
...@@ -135,6 +135,7 @@ get_pointer_alignment (exp, max_align) ...@@ -135,6 +135,7 @@ get_pointer_alignment (exp, max_align)
exp = TREE_OPERAND (exp, 0); exp = TREE_OPERAND (exp, 0);
if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
return align; return align;
inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
align = MIN (inner, max_align); align = MIN (inner, max_align);
break; break;
...@@ -143,10 +144,10 @@ get_pointer_alignment (exp, max_align) ...@@ -143,10 +144,10 @@ get_pointer_alignment (exp, max_align)
/* If sum of pointer + int, restrict our maximum alignment to that /* If sum of pointer + int, restrict our maximum alignment to that
imposed by the integer. If not, we can't do any better than imposed by the integer. If not, we can't do any better than
ALIGN. */ ALIGN. */
if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST) if (! host_integerp (TREE_OPERAND (exp, 1), 1))
return align; return align;
while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT) while (((tree_low_cst (TREE_OPERAND (exp, 1), 1) * BITS_PER_UNIT)
& (max_align - 1)) & (max_align - 1))
!= 0) != 0)
max_align >>= 1; max_align >>= 1;
...@@ -903,8 +904,7 @@ expand_builtin_apply (function, arguments, argsize) ...@@ -903,8 +904,7 @@ expand_builtin_apply (function, arguments, argsize)
dest = allocate_dynamic_stack_space (argsize, 0, 0); dest = allocate_dynamic_stack_space (argsize, 0, 0);
emit_block_move (gen_rtx_MEM (BLKmode, dest), emit_block_move (gen_rtx_MEM (BLKmode, dest),
gen_rtx_MEM (BLKmode, incoming_args), gen_rtx_MEM (BLKmode, incoming_args),
argsize, argsize, PARM_BOUNDARY);
PARM_BOUNDARY / BITS_PER_UNIT);
/* Refer to the argument block. */ /* Refer to the argument block. */
apply_args_size (); apply_args_size ();
...@@ -1435,10 +1435,8 @@ expand_builtin_memcpy (arglist) ...@@ -1435,10 +1435,8 @@ expand_builtin_memcpy (arglist)
tree src = TREE_VALUE (TREE_CHAIN (arglist)); tree src = TREE_VALUE (TREE_CHAIN (arglist));
tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
int src_align int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
= get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
int dest_align
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
rtx dest_mem, src_mem, dest_addr, len_rtx; rtx dest_mem, src_mem, dest_addr, len_rtx;
/* If either SRC or DEST is not a pointer type, don't do /* If either SRC or DEST is not a pointer type, don't do
...@@ -1531,8 +1529,7 @@ expand_builtin_memset (exp) ...@@ -1531,8 +1529,7 @@ expand_builtin_memset (exp)
tree val = TREE_VALUE (TREE_CHAIN (arglist)); tree val = TREE_VALUE (TREE_CHAIN (arglist));
tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
int dest_align int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
rtx dest_mem, dest_addr, len_rtx; rtx dest_mem, dest_addr, len_rtx;
/* If DEST is not a pointer type, don't do this /* If DEST is not a pointer type, don't do this
...@@ -1918,6 +1915,7 @@ stabilize_va_list (valist, needs_lvalue) ...@@ -1918,6 +1915,7 @@ stabilize_va_list (valist, needs_lvalue)
{ {
tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node)); tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
tree p2 = build_pointer_type (va_list_type_node); tree p2 = build_pointer_type (va_list_type_node);
valist = build1 (ADDR_EXPR, p2, valist); valist = build1 (ADDR_EXPR, p2, valist);
valist = fold (build1 (NOP_EXPR, p1, valist)); valist = fold (build1 (NOP_EXPR, p1, valist));
} }
...@@ -2190,8 +2188,7 @@ expand_builtin_va_copy (arglist) ...@@ -2190,8 +2188,7 @@ expand_builtin_va_copy (arglist)
MEM_ALIAS_SET (srcb) = get_alias_set (TREE_TYPE (TREE_TYPE (src))); MEM_ALIAS_SET (srcb) = get_alias_set (TREE_TYPE (TREE_TYPE (src)));
/* Copy. */ /* Copy. */
emit_block_move (dstb, srcb, size, emit_block_move (dstb, srcb, size, TYPE_ALIGN (va_list_type_node));
TYPE_ALIGN (va_list_type_node) / BITS_PER_UNIT);
} }
return const0_rtx; return const0_rtx;
......
...@@ -856,11 +856,11 @@ save_fixed_argument_area (reg_parm_stack_space, argblock, ...@@ -856,11 +856,11 @@ save_fixed_argument_area (reg_parm_stack_space, argblock,
if (save_mode == BLKmode) if (save_mode == BLKmode)
{ {
save_area = assign_stack_temp (BLKmode, num_to_save, 0); save_area = assign_stack_temp (BLKmode, num_to_save, 0);
/* Cannot use emit_block_move here because it can be done by a library /* Cannot use emit_block_move here because it can be done by a
call which in turn gets into this place again and deadly infinite library call which in turn gets into this place again and deadly
recursion happens. */ infinite recursion happens. */
move_by_pieces (validize_mem (save_area), stack_area, num_to_save, move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
PARM_BOUNDARY / BITS_PER_UNIT); PARM_BOUNDARY);
} }
else else
{ {
...@@ -900,8 +900,7 @@ restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save) ...@@ -900,8 +900,7 @@ restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
call which in turn gets into this place again and deadly infinite call which in turn gets into this place again and deadly infinite
recursion happens. */ recursion happens. */
move_by_pieces (stack_area, validize_mem (save_area), move_by_pieces (stack_area, validize_mem (save_area),
high_to_save - low_to_save + 1, high_to_save - low_to_save + 1, PARM_BOUNDARY);
PARM_BOUNDARY / BITS_PER_UNIT);
} }
#endif #endif
...@@ -968,12 +967,10 @@ store_unaligned_arguments_into_pseudos (args, num_actuals) ...@@ -968,12 +967,10 @@ store_unaligned_arguments_into_pseudos (args, num_actuals)
bytes -= bitsize / BITS_PER_UNIT; bytes -= bitsize / BITS_PER_UNIT;
store_bit_field (reg, bitsize, big_endian_correction, word_mode, store_bit_field (reg, bitsize, big_endian_correction, word_mode,
extract_bit_field (word, bitsize, 0, 1, extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
NULL_RTX, word_mode, word_mode, word_mode, bitalign,
word_mode,
bitalign / BITS_PER_UNIT,
BITS_PER_WORD), BITS_PER_WORD),
bitalign / BITS_PER_UNIT, BITS_PER_WORD); bitalign, BITS_PER_WORD);
} }
} }
} }
...@@ -1656,12 +1653,9 @@ load_register_parameters (args, num_actuals, call_fusage) ...@@ -1656,12 +1653,9 @@ load_register_parameters (args, num_actuals, call_fusage)
locations. The Irix 6 ABI has examples of this. */ locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (reg) == PARALLEL) if (GET_CODE (reg) == PARALLEL)
{ emit_group_load (reg, args[i].value,
emit_group_load (reg, args[i].value, int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
int_size_in_bytes (TREE_TYPE (args[i].tree_value)), TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));
(TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
/ BITS_PER_UNIT));
}
/* If simple case, just do move. If normal partial, store_one_arg /* If simple case, just do move. If normal partial, store_one_arg
has already loaded the register for us. In all other cases, has already loaded the register for us. In all other cases,
...@@ -2911,7 +2905,8 @@ expand_call (exp, target, ignore) ...@@ -2911,7 +2905,8 @@ expand_call (exp, target, ignore)
if (! rtx_equal_p (target, valreg)) if (! rtx_equal_p (target, valreg))
emit_group_store (target, valreg, bytes, emit_group_store (target, valreg, bytes,
TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); TYPE_ALIGN (TREE_TYPE (exp)));
/* We can not support sibling calls for this case. */ /* We can not support sibling calls for this case. */
sibcall_failure = 1; sibcall_failure = 1;
} }
...@@ -2992,7 +2987,7 @@ expand_call (exp, target, ignore) ...@@ -2992,7 +2987,7 @@ expand_call (exp, target, ignore)
emit_block_move (stack_area, emit_block_move (stack_area,
validize_mem (args[i].save_area), validize_mem (args[i].save_area),
GEN_INT (args[i].size.constant), GEN_INT (args[i].size.constant),
PARM_BOUNDARY / BITS_PER_UNIT); PARM_BOUNDARY);
sibcall_failure = 1; sibcall_failure = 1;
} }
...@@ -3474,8 +3469,7 @@ emit_library_call_value_1 (retval, orgfun, value, no_queue, outmode, nargs, p) ...@@ -3474,8 +3469,7 @@ emit_library_call_value_1 (retval, orgfun, value, no_queue, outmode, nargs, p)
{ {
save_area = assign_stack_temp (BLKmode, num_to_save, 0); save_area = assign_stack_temp (BLKmode, num_to_save, 0);
emit_block_move (validize_mem (save_area), stack_area, emit_block_move (validize_mem (save_area), stack_area,
GEN_INT (num_to_save), GEN_INT (num_to_save), PARM_BOUNDARY);
PARM_BOUNDARY / BITS_PER_UNIT);
} }
else else
{ {
...@@ -3540,6 +3534,7 @@ emit_library_call_value_1 (retval, orgfun, value, no_queue, outmode, nargs, p) ...@@ -3540,6 +3534,7 @@ emit_library_call_value_1 (retval, orgfun, value, no_queue, outmode, nargs, p)
emit_move_insn (argvec[argnum].save_area, stack_area); emit_move_insn (argvec[argnum].save_area, stack_area);
} }
} }
emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0, emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
argblock, GEN_INT (argvec[argnum].offset.constant), argblock, GEN_INT (argvec[argnum].offset.constant),
reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad)); reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
...@@ -3684,7 +3679,7 @@ emit_library_call_value_1 (retval, orgfun, value, no_queue, outmode, nargs, p) ...@@ -3684,7 +3679,7 @@ emit_library_call_value_1 (retval, orgfun, value, no_queue, outmode, nargs, p)
else else
emit_block_move (stack_area, validize_mem (save_area), emit_block_move (stack_area, validize_mem (save_area),
GEN_INT (high_to_save - low_to_save + 1), GEN_INT (high_to_save - low_to_save + 1),
PARM_BOUNDARY / BITS_PER_UNIT); PARM_BOUNDARY);
} }
#endif #endif
...@@ -3922,7 +3917,7 @@ store_one_arg (arg, argblock, may_be_alloca, variable_size, ...@@ -3922,7 +3917,7 @@ store_one_arg (arg, argblock, may_be_alloca, variable_size,
preserve_temp_slots (arg->save_area); preserve_temp_slots (arg->save_area);
emit_block_move (validize_mem (arg->save_area), stack_area, emit_block_move (validize_mem (arg->save_area), stack_area,
GEN_INT (arg->size.constant), GEN_INT (arg->size.constant),
PARM_BOUNDARY / BITS_PER_UNIT); PARM_BOUNDARY);
} }
else else
{ {
...@@ -4084,8 +4079,8 @@ store_one_arg (arg, argblock, may_be_alloca, variable_size, ...@@ -4084,8 +4079,8 @@ store_one_arg (arg, argblock, may_be_alloca, variable_size,
} }
emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx, emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial, TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
reg, excess, argblock, ARGS_SIZE_RTX (arg->offset), argblock, ARGS_SIZE_RTX (arg->offset),
reg_parm_stack_space, reg_parm_stack_space,
ARGS_SIZE_RTX (arg->alignment_pad)); ARGS_SIZE_RTX (arg->alignment_pad));
} }
......
Thu Mar 30 06:32:51 2000 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
* expr.c (chill_expand_expr): Pass bit alignment to emit_block_move.
Sat Mar 25 09:12:10 2000 Richard Kenner <kenner@vlsi1.ultra.nyu.edu> Sat Mar 25 09:12:10 2000 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
* actions.c (check_missing_cases): BYTES_NEEDED is HOST_WIDE_INT. * actions.c (check_missing_cases): BYTES_NEEDED is HOST_WIDE_INT.
......
...@@ -362,7 +362,7 @@ chill_expand_expr (exp, target, tmode, modifier) ...@@ -362,7 +362,7 @@ chill_expand_expr (exp, target, tmode, modifier)
if (temp == target || target == NULL_RTX) if (temp == target || target == NULL_RTX)
return temp; return temp;
emit_block_move (target, temp, expr_size (exp0), emit_block_move (target, temp, expr_size (exp0),
TYPE_ALIGN (TREE_TYPE(exp0)) / BITS_PER_UNIT); TYPE_ALIGN (TREE_TYPE(exp0)));
return target; return target;
} }
else else
......
...@@ -618,7 +618,7 @@ extern int rs6000_debug_arg; /* debug argument handling */ ...@@ -618,7 +618,7 @@ extern int rs6000_debug_arg; /* debug argument handling */
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) \ #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) \
((STRICT_ALIGNMENT \ ((STRICT_ALIGNMENT \
|| (((MODE) == SFmode || (MODE) == DFmode || (MODE) == DImode) \ || (((MODE) == SFmode || (MODE) == DFmode || (MODE) == DImode) \
&& (ALIGN) < 4)) ? 1 : 0) && (ALIGN) < 32)) ? 1 : 0)
/* Standard register usage. */ /* Standard register usage. */
......
...@@ -1183,7 +1183,7 @@ extern int current_function_anonymous_args; ...@@ -1183,7 +1183,7 @@ extern int current_function_anonymous_args;
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns (SIZE, ALIGN) \ #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns (SIZE, ALIGN) \
< (TARGET_SMALLCODE ? 2 : \ < (TARGET_SMALLCODE ? 2 : \
((ALIGN >= 4) ? 16 : 2))) ((ALIGN >= 32) ? 16 : 2)))
/* Macros to check register numbers against specific register classes. */ /* Macros to check register numbers against specific register classes. */
......
...@@ -947,7 +947,7 @@ store_split_bit_field (op0, bitsize, bitpos, value, align) ...@@ -947,7 +947,7 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
TMODE is the mode the caller would like the value to have; TMODE is the mode the caller would like the value to have;
but the value may be returned with type MODE instead. but the value may be returned with type MODE instead.
ALIGN is the alignment that STR_RTX is known to have, measured in bytes. ALIGN is the alignment that STR_RTX is known to have.
TOTAL_SIZE is the size in bytes of the containing structure, TOTAL_SIZE is the size in bytes of the containing structure,
or -1 if varying. or -1 if varying.
...@@ -1068,7 +1068,7 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp, ...@@ -1068,7 +1068,7 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
|| (GET_CODE (op0) == MEM || (GET_CODE (op0) == MEM
&& (! SLOW_UNALIGNED_ACCESS (mode, align) && (! SLOW_UNALIGNED_ACCESS (mode, align)
|| (offset * BITS_PER_UNIT % bitsize == 0 || (offset * BITS_PER_UNIT % bitsize == 0
&& align * BITS_PER_UNIT % bitsize == 0)))) && align % bitsize == 0))))
&& ((bitsize >= BITS_PER_WORD && bitsize == GET_MODE_BITSIZE (mode) && ((bitsize >= BITS_PER_WORD && bitsize == GET_MODE_BITSIZE (mode)
&& bitpos % BITS_PER_WORD == 0) && bitpos % BITS_PER_WORD == 0)
|| (mode_for_size (bitsize, GET_MODE_CLASS (tmode), 0) != BLKmode || (mode_for_size (bitsize, GET_MODE_CLASS (tmode), 0) != BLKmode
...@@ -1144,9 +1144,8 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp, ...@@ -1144,9 +1144,8 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
rtx result_part rtx result_part
= extract_bit_field (op0, MIN (BITS_PER_WORD, = extract_bit_field (op0, MIN (BITS_PER_WORD,
bitsize - i * BITS_PER_WORD), bitsize - i * BITS_PER_WORD),
bitnum + bit_offset, bitnum + bit_offset, 1, target_part, mode,
1, target_part, mode, word_mode, word_mode, align, total_size);
align, total_size);
if (target_part == 0) if (target_part == 0)
abort (); abort ();
...@@ -1262,15 +1261,14 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp, ...@@ -1262,15 +1261,14 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
if (GET_MODE (xop0) == BLKmode if (GET_MODE (xop0) == BLKmode
|| (GET_MODE_SIZE (GET_MODE (op0)) || (GET_MODE_SIZE (GET_MODE (op0))
> GET_MODE_SIZE (maxmode))) > GET_MODE_SIZE (maxmode)))
bestmode = get_best_mode (bitsize, bitnum, bestmode = get_best_mode (bitsize, bitnum, align, maxmode,
align * BITS_PER_UNIT, maxmode,
MEM_VOLATILE_P (xop0)); MEM_VOLATILE_P (xop0));
else else
bestmode = GET_MODE (xop0); bestmode = GET_MODE (xop0);
if (bestmode == VOIDmode if (bestmode == VOIDmode
|| (SLOW_UNALIGNED_ACCESS (bestmode, align) || (SLOW_UNALIGNED_ACCESS (bestmode, align)
&& GET_MODE_SIZE (bestmode) > align)) && GET_MODE_BITSIZE (bestmode) > align))
goto extzv_loses; goto extzv_loses;
/* Compute offset as multiple of this unit, /* Compute offset as multiple of this unit,
...@@ -1400,15 +1398,14 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp, ...@@ -1400,15 +1398,14 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
if (GET_MODE (xop0) == BLKmode if (GET_MODE (xop0) == BLKmode
|| (GET_MODE_SIZE (GET_MODE (op0)) || (GET_MODE_SIZE (GET_MODE (op0))
> GET_MODE_SIZE (maxmode))) > GET_MODE_SIZE (maxmode)))
bestmode = get_best_mode (bitsize, bitnum, bestmode = get_best_mode (bitsize, bitnum, align, maxmode,
align * BITS_PER_UNIT, maxmode,
MEM_VOLATILE_P (xop0)); MEM_VOLATILE_P (xop0));
else else
bestmode = GET_MODE (xop0); bestmode = GET_MODE (xop0);
if (bestmode == VOIDmode if (bestmode == VOIDmode
|| (SLOW_UNALIGNED_ACCESS (bestmode, align) || (SLOW_UNALIGNED_ACCESS (bestmode, align)
&& GET_MODE_SIZE (bestmode) > align)) && GET_MODE_BITSIZE (bestmode) > align))
goto extv_loses; goto extv_loses;
/* Compute offset as multiple of this unit, /* Compute offset as multiple of this unit,
...@@ -1538,7 +1535,7 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp, ...@@ -1538,7 +1535,7 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
and return TARGET, but this is not guaranteed. and return TARGET, but this is not guaranteed.
If TARGET is not used, create a pseudo-reg of mode TMODE for the value. If TARGET is not used, create a pseudo-reg of mode TMODE for the value.
ALIGN is the alignment that STR_RTX is known to have, measured in bytes. */ ALIGN is the alignment that STR_RTX is known to have. */
static rtx static rtx
extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos, extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
...@@ -1565,8 +1562,8 @@ extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos, ...@@ -1565,8 +1562,8 @@ extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
includes the entire field. If such a mode would be larger than includes the entire field. If such a mode would be larger than
a word, we won't be doing the extraction the normal way. */ a word, we won't be doing the extraction the normal way. */
mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT, mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT, align,
align * BITS_PER_UNIT, word_mode, word_mode,
GET_CODE (op0) == MEM && MEM_VOLATILE_P (op0)); GET_CODE (op0) == MEM && MEM_VOLATILE_P (op0));
if (mode == VOIDmode) if (mode == VOIDmode)
...@@ -1759,8 +1756,8 @@ lshift_value (mode, value, bitpos, bitsize) ...@@ -1759,8 +1756,8 @@ lshift_value (mode, value, bitpos, bitsize)
BITSIZE is the field width; BITPOS, position of its first bit, in the word. BITSIZE is the field width; BITPOS, position of its first bit, in the word.
UNSIGNEDP is 1 if should zero-extend the contents; else sign-extend. UNSIGNEDP is 1 if should zero-extend the contents; else sign-extend.
ALIGN is the known alignment of OP0, measured in bytes. ALIGN is the known alignment of OP0. This is also the size of the
This is also the size of the memory objects to be used. */ memory objects to be used. */
static rtx static rtx
extract_split_bit_field (op0, bitsize, bitpos, unsignedp, align) extract_split_bit_field (op0, bitsize, bitpos, unsignedp, align)
...@@ -1779,7 +1776,7 @@ extract_split_bit_field (op0, bitsize, bitpos, unsignedp, align) ...@@ -1779,7 +1776,7 @@ extract_split_bit_field (op0, bitsize, bitpos, unsignedp, align)
if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG) if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
unit = BITS_PER_WORD; unit = BITS_PER_WORD;
else else
unit = MIN (align * BITS_PER_UNIT, BITS_PER_WORD); unit = MIN (align, BITS_PER_WORD);
while (bitsdone < bitsize) while (bitsdone < bitsize)
{ {
......
...@@ -793,12 +793,13 @@ extern void emit_0_to_1_insn PARAMS ((rtx)); ...@@ -793,12 +793,13 @@ extern void emit_0_to_1_insn PARAMS ((rtx));
/* Emit one rtl insn to compare two rtx's. */ /* Emit one rtl insn to compare two rtx's. */
extern void emit_cmp_insn PARAMS ((rtx, rtx, enum rtx_code, rtx, extern void emit_cmp_insn PARAMS ((rtx, rtx, enum rtx_code, rtx,
enum machine_mode, int, int)); enum machine_mode, int, unsigned int));
/* Emit a pair of rtl insns to compare two rtx's and to jump /* Emit a pair of rtl insns to compare two rtx's and to jump
to a label if the comparison is true. */ to a label if the comparison is true. */
extern void emit_cmp_and_jump_insns PARAMS ((rtx, rtx, enum rtx_code, rtx, extern void emit_cmp_and_jump_insns PARAMS ((rtx, rtx, enum rtx_code, rtx,
enum machine_mode, int, int, rtx)); enum machine_mode, int,
unsigned int, rtx));
/* The various uses that a comparison can have; used by can_compare_p: /* The various uses that a comparison can have; used by can_compare_p:
jumps, conditional moves, store flag operations. */ jumps, conditional moves, store flag operations. */
......
...@@ -3027,7 +3027,7 @@ purge_addressof_1 (loc, insn, force, store, ht) ...@@ -3027,7 +3027,7 @@ purge_addressof_1 (loc, insn, force, store, ht)
start_sequence (); start_sequence ();
store_bit_field (sub, size_x, 0, GET_MODE (x), store_bit_field (sub, size_x, 0, GET_MODE (x),
val, GET_MODE_SIZE (GET_MODE (sub)), val, GET_MODE_SIZE (GET_MODE (sub)),
GET_MODE_SIZE (GET_MODE (sub))); GET_MODE_ALIGNMENT (GET_MODE (sub)));
/* Make sure to unshare any shared rtl that store_bit_field /* Make sure to unshare any shared rtl that store_bit_field
might have created. */ might have created. */
...@@ -4339,8 +4339,8 @@ assign_parms (fndecl) ...@@ -4339,8 +4339,8 @@ assign_parms (fndecl)
if (GET_CODE (entry_parm) == PARALLEL) if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm, emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)), int_size_in_bytes (TREE_TYPE (parm)),
(TYPE_ALIGN (TREE_TYPE (parm)) TYPE_ALIGN (TREE_TYPE (parm)));
/ BITS_PER_UNIT));
else else
move_block_from_reg (REGNO (entry_parm), move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm), nregs, validize_mem (stack_parm), nregs,
...@@ -4498,8 +4498,7 @@ assign_parms (fndecl) ...@@ -4498,8 +4498,7 @@ assign_parms (fndecl)
if (GET_CODE (entry_parm) == PARALLEL) if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm, emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)), int_size_in_bytes (TREE_TYPE (parm)),
(TYPE_ALIGN (TREE_TYPE (parm)) TYPE_ALIGN (TREE_TYPE (parm)));
/ BITS_PER_UNIT));
else else
move_block_from_reg (REGNO (entry_parm), move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm), validize_mem (stack_parm),
......
...@@ -2916,6 +2916,7 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align, ...@@ -2916,6 +2916,7 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align,
rtx x = *px, y = *py; rtx x = *px, y = *py;
int unsignedp = *punsignedp; int unsignedp = *punsignedp;
enum mode_class class; enum mode_class class;
rtx opalign ATTRIBUTE_UNUSED = GEN_INT (align / BITS_PER_UNIT);;
class = GET_MODE_CLASS (mode); class = GET_MODE_CLASS (mode);
...@@ -2932,10 +2933,12 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align, ...@@ -2932,10 +2933,12 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align,
/* If we are inside an appropriately-short loop and one operand is an /* If we are inside an appropriately-short loop and one operand is an
expensive constant, force it into a register. */ expensive constant, force it into a register. */
if (CONSTANT_P (x) && preserve_subexpressions_p () && rtx_cost (x, COMPARE) > 2) if (CONSTANT_P (x) && preserve_subexpressions_p ()
&& rtx_cost (x, COMPARE) > 2)
x = force_reg (mode, x); x = force_reg (mode, x);
if (CONSTANT_P (y) && preserve_subexpressions_p () && rtx_cost (y, COMPARE) > 2) if (CONSTANT_P (y) && preserve_subexpressions_p ()
&& rtx_cost (y, COMPARE) > 2)
y = force_reg (mode, y); y = force_reg (mode, y);
#ifdef HAVE_cc0 #ifdef HAVE_cc0
...@@ -2970,7 +2973,7 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align, ...@@ -2970,7 +2973,7 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align,
{ {
result_mode = insn_data[(int) CODE_FOR_cmpstrqi].operand[0].mode; result_mode = insn_data[(int) CODE_FOR_cmpstrqi].operand[0].mode;
result = gen_reg_rtx (result_mode); result = gen_reg_rtx (result_mode);
emit_insn (gen_cmpstrqi (result, x, y, size, GEN_INT (align))); emit_insn (gen_cmpstrqi (result, x, y, size, opalign));
} }
else else
#endif #endif
...@@ -2981,7 +2984,7 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align, ...@@ -2981,7 +2984,7 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align,
{ {
result_mode = insn_data[(int) CODE_FOR_cmpstrhi].operand[0].mode; result_mode = insn_data[(int) CODE_FOR_cmpstrhi].operand[0].mode;
result = gen_reg_rtx (result_mode); result = gen_reg_rtx (result_mode);
emit_insn (gen_cmpstrhi (result, x, y, size, GEN_INT (align))); emit_insn (gen_cmpstrhi (result, x, y, size, opalign));
} }
else else
#endif #endif
...@@ -2993,7 +2996,7 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align, ...@@ -2993,7 +2996,7 @@ prepare_cmp_insn (px, py, pcomparison, size, pmode, punsignedp, align,
size = protect_from_queue (size, 0); size = protect_from_queue (size, 0);
emit_insn (gen_cmpstrsi (result, x, y, emit_insn (gen_cmpstrsi (result, x, y,
convert_to_mode (SImode, size, 1), convert_to_mode (SImode, size, 1),
GEN_INT (align))); opalign));
} }
else else
#endif #endif
...@@ -3190,7 +3193,7 @@ emit_cmp_and_jump_insns (x, y, comparison, size, mode, unsignedp, align, label) ...@@ -3190,7 +3193,7 @@ emit_cmp_and_jump_insns (x, y, comparison, size, mode, unsignedp, align, label)
rtx size; rtx size;
enum machine_mode mode; enum machine_mode mode;
int unsignedp; int unsignedp;
int align; unsigned int align;
rtx label; rtx label;
{ {
rtx op0; rtx op0;
...@@ -3227,6 +3230,7 @@ emit_cmp_and_jump_insns (x, y, comparison, size, mode, unsignedp, align, label) ...@@ -3227,6 +3230,7 @@ emit_cmp_and_jump_insns (x, y, comparison, size, mode, unsignedp, align, label)
} }
/* Like emit_cmp_and_jump_insns, but generate only the comparison. */ /* Like emit_cmp_and_jump_insns, but generate only the comparison. */
void void
emit_cmp_insn (x, y, comparison, size, mode, unsignedp, align) emit_cmp_insn (x, y, comparison, size, mode, unsignedp, align)
rtx x, y; rtx x, y;
...@@ -3234,7 +3238,7 @@ emit_cmp_insn (x, y, comparison, size, mode, unsignedp, align) ...@@ -3234,7 +3238,7 @@ emit_cmp_insn (x, y, comparison, size, mode, unsignedp, align)
rtx size; rtx size;
enum machine_mode mode; enum machine_mode mode;
int unsignedp; int unsignedp;
int align; unsigned int align;
{ {
emit_cmp_and_jump_insns (x, y, comparison, size, mode, unsignedp, align, 0); emit_cmp_and_jump_insns (x, y, comparison, size, mode, unsignedp, align, 0);
} }
......
...@@ -2725,7 +2725,7 @@ expand_value_return (val) ...@@ -2725,7 +2725,7 @@ expand_value_return (val)
#endif #endif
if (GET_CODE (return_reg) == PARALLEL) if (GET_CODE (return_reg) == PARALLEL)
emit_group_load (return_reg, val, int_size_in_bytes (type), emit_group_load (return_reg, val, int_size_in_bytes (type),
TYPE_ALIGN (type) / BITS_PER_UNIT); TYPE_ALIGN (type));
else else
emit_move_insn (return_reg, val); emit_move_insn (return_reg, val);
} }
...@@ -3014,11 +3014,9 @@ expand_return (retval) ...@@ -3014,11 +3014,9 @@ expand_return (retval)
store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode, store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
extract_bit_field (src, bitsize, extract_bit_field (src, bitsize,
bitpos % BITS_PER_WORD, 1, bitpos % BITS_PER_WORD, 1,
NULL_RTX, word_mode, NULL_RTX, word_mode, word_mode,
word_mode, bitsize, BITS_PER_WORD),
bitsize / BITS_PER_UNIT, bitsize, BITS_PER_WORD);
BITS_PER_WORD),
bitsize / BITS_PER_UNIT, BITS_PER_WORD);
} }
/* Find the smallest integer mode large enough to hold the /* Find the smallest integer mode large enough to hold the
......
...@@ -1767,12 +1767,10 @@ unsigned int ...@@ -1767,12 +1767,10 @@ unsigned int
get_mode_alignment (mode) get_mode_alignment (mode)
enum machine_mode mode; enum machine_mode mode;
{ {
unsigned alignment = GET_MODE_UNIT_SIZE (mode); unsigned int alignment = GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT;
/* Extract the LSB of the size. */ /* Extract the LSB of the size. */
alignment = alignment & -alignment; alignment = alignment & -alignment;
alignment *= BITS_PER_UNIT;
alignment = MIN (BIGGEST_ALIGNMENT, MAX (1, alignment)); alignment = MIN (BIGGEST_ALIGNMENT, MAX (1, alignment));
return alignment; return alignment;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment