Commit 5b0264cb by Nathan Sidwell

emit-rtl.c (immed_double_const): Use gcc_assert and gcc_unreachable.

	* emit-rtl.c (immed_double_const): Use gcc_assert and gcc_unreachable.
	(gen_rtx_SUBREG, gen_reg_rtx, mark_user_reg, subreg_hard_regno,
	gen_lowpart_common, gen_highpart, gen_highpart_mode,
	subreg_highpart_offset, operand_subword, operand_subword_force,
	mem_expr_equal_p, set_mem_attributes_minus_bitpos,
	set_mem_alias_set, change_address_1, verify_rtx_sharing,
	copy_most_rtx, set_first_insn, set_last_insn, prev_cc0_setter,
	try_split, add_insn_after, add_insn_before, remove_insn,
	remove_unnecessary_notes, emit_insn_before, emit_jump_insn_before,
	emit_call_insn_before, emit_insn_after, emit_jump_insn_after,
	emit_call_insn_after, emit_insn, emit_jump_insn, emit_call_insn,
	set_unique_reg_note, emit, push_to_full_sequence, copy_insn_1,
	gen_const_vector_0, emit_copy_of_insn_after): Likewise.
	* et-forest.c (set_prev, set_next, et_check_occ_sanity,
	record_path_before_1, check_path_after_1, check_path_after): Likewise.
	* except.c (gen_eh_region, resolve_one_fixup_region,
	remove_unreachable_regions, convert_from_eh_region_ranges_1,
	add_ehl_entry, duplicate_eh_region_1, build_post_landing_pads,
	connect_post_landing_pads, sjlj_emit_function_exit,
	remove_exception_handler_label, remove_eh_handler,
	reachable_next_level, collect_one_action_chain,
	output_function_exception_table): Likewise.
	* explow.c (trunc_int_for_mode, copy_to_mode_reg,
	optimize_save_area_alloca, allocate_dynamic_stack_space,
	probe_stack_range, hard_function_value): Likewise.
	* expmed.c (mode_for_extraction, store_bit_field,
	store_fixed_bit_field, extract_bit_field, expand_shift,
	expand_mult_const, expand_mult, choose_multiplier,
	expand_mult_highpart, expand_divmod, emit_store_flag,
	do_cmp_and_jump): Likewise.
	* expr.c (convert_move, convert_modes, move_by_pieces,
	move_by_pieces_ninsns, move_by_pieces_1, emit_block_move,
	move_block_from_reg, gen_group_rtx, emit_group_load,
	emit_group_move, emit_group_store, use_reg, use_regs,
	can_store_by_pieces, store_by_pieces, store_by_pieces_1,
	emit_move_insn, emit_move_insn_1, emit_push_insn,
	expand_assignment, store_expr, count_type_elements,
	store_constructor, store_field, safe_from_p, expand_var,
	expand_expr_addr_expr, expand_expr_real_1, do_store_flag): Likewise.

From-SVN: r87178
parent ced3f397
2004-09-08 Nathan Sidwell <nathan@codesourcery.com> 2004-09-08 Nathan Sidwell <nathan@codesourcery.com>
* emit-rtl.c (immed_double_const): Use gcc_assert and gcc_unreachable.
(gen_rtx_SUBREG, gen_reg_rtx, mark_user_reg, subreg_hard_regno,
gen_lowpart_common, gen_highpart, gen_highpart_mode,
subreg_highpart_offset, operand_subword, operand_subword_force,
mem_expr_equal_p, set_mem_attributes_minus_bitpos,
set_mem_alias_set, change_address_1, verify_rtx_sharing,
copy_most_rtx, set_first_insn, set_last_insn, prev_cc0_setter,
try_split, add_insn_after, add_insn_before, remove_insn,
remove_unnecessary_notes, emit_insn_before, emit_jump_insn_before,
emit_call_insn_before, emit_insn_after, emit_jump_insn_after,
emit_call_insn_after, emit_insn, emit_jump_insn, emit_call_insn,
set_unique_reg_note, emit, push_to_full_sequence, copy_insn_1,
gen_const_vector_0, emit_copy_of_insn_after): Likewise.
* et-forest.c (set_prev, set_next, et_check_occ_sanity,
record_path_before_1, check_path_after_1, check_path_after): Likewise.
* except.c (gen_eh_region, resolve_one_fixup_region,
remove_unreachable_regions, convert_from_eh_region_ranges_1,
add_ehl_entry, duplicate_eh_region_1, build_post_landing_pads,
connect_post_landing_pads, sjlj_emit_function_exit,
remove_exception_handler_label, remove_eh_handler,
reachable_next_level, collect_one_action_chain,
output_function_exception_table): Likewise.
* explow.c (trunc_int_for_mode, copy_to_mode_reg,
optimize_save_area_alloca, allocate_dynamic_stack_space,
probe_stack_range, hard_function_value): Likewise.
* expmed.c (mode_for_extraction, store_bit_field,
store_fixed_bit_field, extract_bit_field, expand_shift,
expand_mult_const, expand_mult, choose_multiplier,
expand_mult_highpart, expand_divmod, emit_store_flag,
do_cmp_and_jump): Likewise.
* expr.c (convert_move, convert_modes, move_by_pieces,
move_by_pieces_ninsns, move_by_pieces_1, emit_block_move,
move_block_from_reg, gen_group_rtx, emit_group_load,
emit_group_move, emit_group_store, use_reg, use_regs,
can_store_by_pieces, store_by_pieces, store_by_pieces_1,
emit_move_insn, emit_move_insn_1, emit_push_insn,
expand_assignment, store_expr, count_type_elements,
store_constructor, store_field, safe_from_p, expand_var,
expand_expr_addr_expr, expand_expr_real_1, do_store_flag): Likewise.
2004-09-08 Nathan Sidwell <nathan@codesourcery.com>
* dbxout.c (dbxout_type, dbxout_type_name, dbxout_symbol): Use * dbxout.c (dbxout_type, dbxout_type_name, dbxout_symbol): Use
gcc_assert and gcc_unreachable. gcc_assert and gcc_unreachable.
* ddg.c (create_ddg_dependence, add_deps_for_def, * ddg.c (create_ddg_dependence, add_deps_for_def,
...@@ -6066,7 +6108,7 @@ ...@@ -6066,7 +6108,7 @@
* config/i386/xmmintrin.h: Include <mm_malloc.h>. * config/i386/xmmintrin.h: Include <mm_malloc.h>.
2004-08-03 H.J. Lu <hongjiu.lu@intel.com> 2004-08-03 H.J. Lu <hongjiu.lu@intel.com>
 Tanguy Fautré <tfautre@pandora.be> Tanguy Fautré <tfautre@pandora.be>
* config/i386/pmm_malloc.h: New file. * config/i386/pmm_malloc.h: New file.
......
...@@ -456,12 +456,12 @@ immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode) ...@@ -456,12 +456,12 @@ immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
if (mode != VOIDmode) if (mode != VOIDmode)
{ {
int width; int width;
if (GET_MODE_CLASS (mode) != MODE_INT
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
/* We can get a 0 for an error mark. */ || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
&& GET_MODE_CLASS (mode) != MODE_VECTOR_INT /* We can get a 0 for an error mark. */
&& GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT) || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
abort (); || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
/* We clear out all bits that don't belong in MODE, unless they and /* We clear out all bits that don't belong in MODE, unless they and
our sign bit are all one. So we get either a reasonable negative our sign bit are all one. So we get either a reasonable negative
...@@ -474,9 +474,9 @@ immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode) ...@@ -474,9 +474,9 @@ immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
else if (width == HOST_BITS_PER_WIDE_INT else if (width == HOST_BITS_PER_WIDE_INT
&& ! (i1 == ~0 && i0 < 0)) && ! (i1 == ~0 && i0 < 0))
i1 = 0; i1 = 0;
else if (width > 2 * HOST_BITS_PER_WIDE_INT) else
/* We cannot represent this value as a constant. */ /* We should be able to represent this value as a constant. */
abort (); gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);
/* If this would be an entire word for the target, but is not for /* If this would be an entire word for the target, but is not for
the host, then sign-extend on the host so that the number will the host, then sign-extend on the host so that the number will
...@@ -623,16 +623,14 @@ gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset) ...@@ -623,16 +623,14 @@ gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{ {
/* This is the most common failure type. /* This is the most common failure type.
Catch it early so we can see who does it. */ Catch it early so we can see who does it. */
if ((offset % GET_MODE_SIZE (mode)) != 0) gcc_assert (!(offset % GET_MODE_SIZE (mode)));
abort ();
/* This check isn't usable right now because combine will /* This check isn't usable right now because combine will
throw arbitrary crap like a CALL into a SUBREG in throw arbitrary crap like a CALL into a SUBREG in
gen_lowpart_for_combine so we must just eat it. */ gen_lowpart_for_combine so we must just eat it. */
#if 0 #if 0
/* Check for this too. */ /* Check for this too. */
if (offset >= GET_MODE_SIZE (GET_MODE (reg))) gcc_assert (offset < GET_MODE_SIZE (GET_MODE (reg)));
abort ();
#endif #endif
return gen_rtx_raw_SUBREG (mode, reg, offset); return gen_rtx_raw_SUBREG (mode, reg, offset);
} }
...@@ -711,8 +709,7 @@ gen_reg_rtx (enum machine_mode mode) ...@@ -711,8 +709,7 @@ gen_reg_rtx (enum machine_mode mode)
/* Don't let anything called after initial flow analysis create new /* Don't let anything called after initial flow analysis create new
registers. */ registers. */
if (no_new_pseudos) gcc_assert (!no_new_pseudos);
abort ();
if (generating_concat_p if (generating_concat_p
&& (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
...@@ -972,10 +969,11 @@ mark_user_reg (rtx reg) ...@@ -972,10 +969,11 @@ mark_user_reg (rtx reg)
REG_USERVAR_P (XEXP (reg, 0)) = 1; REG_USERVAR_P (XEXP (reg, 0)) = 1;
REG_USERVAR_P (XEXP (reg, 1)) = 1; REG_USERVAR_P (XEXP (reg, 1)) = 1;
} }
else if (REG_P (reg))
REG_USERVAR_P (reg) = 1;
else else
abort (); {
gcc_assert (REG_P (reg));
REG_USERVAR_P (reg) = 1;
}
} }
/* Identify REG as a probable pointer register and show its alignment /* Identify REG as a probable pointer register and show its alignment
...@@ -1044,23 +1042,17 @@ subreg_hard_regno (rtx x, int check_mode) ...@@ -1044,23 +1042,17 @@ subreg_hard_regno (rtx x, int check_mode)
/* This is where we attempt to catch illegal subregs /* This is where we attempt to catch illegal subregs
created by the compiler. */ created by the compiler. */
if (GET_CODE (x) != SUBREG gcc_assert (GET_CODE (x) == SUBREG && REG_P (reg));
|| !REG_P (reg))
abort ();
base_regno = REGNO (reg); base_regno = REGNO (reg);
if (base_regno >= FIRST_PSEUDO_REGISTER) gcc_assert (base_regno < FIRST_PSEUDO_REGISTER);
abort (); gcc_assert (!check_mode || HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)));
if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
abort ();
#ifdef ENABLE_CHECKING #ifdef ENABLE_CHECKING
if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg), gcc_assert (subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
SUBREG_BYTE (x), mode)) SUBREG_BYTE (x), mode));
abort ();
#endif #endif
/* Catch non-congruent offsets too. */ /* Catch non-congruent offsets too. */
byte_offset = SUBREG_BYTE (x); byte_offset = SUBREG_BYTE (x);
if ((byte_offset % GET_MODE_SIZE (mode)) != 0) gcc_assert (!(byte_offset % GET_MODE_SIZE (mode)));
abort ();
final_regno = subreg_regno (x); final_regno = subreg_regno (x);
...@@ -1096,8 +1088,7 @@ gen_lowpart_common (enum machine_mode mode, rtx x) ...@@ -1096,8 +1088,7 @@ gen_lowpart_common (enum machine_mode mode, rtx x)
xsize = GET_MODE_SIZE (innermode); xsize = GET_MODE_SIZE (innermode);
if (innermode == VOIDmode || innermode == BLKmode) gcc_assert (innermode != VOIDmode && innermode != BLKmode);
abort ();
if (innermode == mode) if (innermode == mode)
return x; return x;
...@@ -1224,21 +1215,22 @@ gen_highpart (enum machine_mode mode, rtx x) ...@@ -1224,21 +1215,22 @@ gen_highpart (enum machine_mode mode, rtx x)
/* This case loses if X is a subreg. To catch bugs early, /* This case loses if X is a subreg. To catch bugs early,
complain if an invalid MODE is used even in other cases. */ complain if an invalid MODE is used even in other cases. */
if (msize > UNITS_PER_WORD gcc_assert (msize <= UNITS_PER_WORD
&& msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x))) || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
abort ();
result = simplify_gen_subreg (mode, x, GET_MODE (x), result = simplify_gen_subreg (mode, x, GET_MODE (x),
subreg_highpart_offset (mode, GET_MODE (x))); subreg_highpart_offset (mode, GET_MODE (x)));
gcc_assert (result);
/* simplify_gen_subreg is not guaranteed to return a valid operand for /* simplify_gen_subreg is not guaranteed to return a valid operand for
the target if we have a MEM. gen_highpart must return a valid operand, the target if we have a MEM. gen_highpart must return a valid operand,
emitting code if necessary to do so. */ emitting code if necessary to do so. */
if (result != NULL_RTX && MEM_P (result)) if (MEM_P (result))
result = validize_mem (result); {
result = validize_mem (result);
if (!result) gcc_assert (result);
abort (); }
return result; return result;
} }
...@@ -1249,8 +1241,7 @@ gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx ...@@ -1249,8 +1241,7 @@ gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx
{ {
if (GET_MODE (exp) != VOIDmode) if (GET_MODE (exp) != VOIDmode)
{ {
if (GET_MODE (exp) != innermode) gcc_assert (GET_MODE (exp) == innermode);
abort ();
return gen_highpart (outermode, exp); return gen_highpart (outermode, exp);
} }
return simplify_gen_subreg (outermode, exp, innermode, return simplify_gen_subreg (outermode, exp, innermode,
...@@ -1285,8 +1276,7 @@ subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode ...@@ -1285,8 +1276,7 @@ subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode
unsigned int offset = 0; unsigned int offset = 0;
int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)); int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode)) gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
abort ();
if (difference > 0) if (difference > 0)
{ {
...@@ -1346,8 +1336,7 @@ operand_subword (rtx op, unsigned int offset, int validate_address, enum machine ...@@ -1346,8 +1336,7 @@ operand_subword (rtx op, unsigned int offset, int validate_address, enum machine
if (mode == VOIDmode) if (mode == VOIDmode)
mode = GET_MODE (op); mode = GET_MODE (op);
if (mode == VOIDmode) gcc_assert (mode != VOIDmode);
abort ();
/* If OP is narrower than a word, fail. */ /* If OP is narrower than a word, fail. */
if (mode != BLKmode if (mode != BLKmode
...@@ -1405,8 +1394,7 @@ operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode) ...@@ -1405,8 +1394,7 @@ operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
} }
result = operand_subword (op, offset, 1, mode); result = operand_subword (op, offset, 1, mode);
if (result == 0) gcc_assert (result);
abort ();
return result; return result;
} }
...@@ -1500,13 +1488,13 @@ mem_expr_equal_p (tree expr1, tree expr2) ...@@ -1500,13 +1488,13 @@ mem_expr_equal_p (tree expr1, tree expr2)
if (TREE_CODE (expr1) == INDIRECT_REF) if (TREE_CODE (expr1) == INDIRECT_REF)
return mem_expr_equal_p (TREE_OPERAND (expr1, 0), return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
TREE_OPERAND (expr2, 0)); TREE_OPERAND (expr2, 0));
/* Decls with different pointers can't be equal. */
if (DECL_P (expr1))
return 0;
abort(); /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
have been resolved here. */ have been resolved here. */
gcc_assert (DECL_P (expr1));
/* Decls with different pointers can't be equal. */
return 0;
} }
/* Given REF, a MEM, and T, either the type of X or the expression /* Given REF, a MEM, and T, either the type of X or the expression
...@@ -1540,8 +1528,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, ...@@ -1540,8 +1528,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
wrong answer, as it assumes that DECL_RTL already has the right alias wrong answer, as it assumes that DECL_RTL already has the right alias
info. Callers should not set DECL_RTL until after the call to info. Callers should not set DECL_RTL until after the call to
set_mem_attributes. */ set_mem_attributes. */
if (DECL_P (t) && ref == DECL_RTL_IF_SET (t)) gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
abort ();
/* Get the alias set from the expression or type (perhaps using a /* Get the alias set from the expression or type (perhaps using a
front-end routine) and use it. */ front-end routine) and use it. */
...@@ -1754,8 +1741,7 @@ set_mem_alias_set (rtx mem, HOST_WIDE_INT set) ...@@ -1754,8 +1741,7 @@ set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
{ {
#ifdef ENABLE_CHECKING #ifdef ENABLE_CHECKING
/* If the new and old alias sets don't conflict, something is wrong. */ /* If the new and old alias sets don't conflict, something is wrong. */
if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))) gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
abort ();
#endif #endif
MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem), MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
...@@ -1814,8 +1800,7 @@ change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate) ...@@ -1814,8 +1800,7 @@ change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{ {
rtx new; rtx new;
if (!MEM_P (memref)) gcc_assert (MEM_P (memref));
abort ();
if (mode == VOIDmode) if (mode == VOIDmode)
mode = GET_MODE (memref); mode = GET_MODE (memref);
if (addr == 0) if (addr == 0)
...@@ -1827,10 +1812,7 @@ change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate) ...@@ -1827,10 +1812,7 @@ change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
if (validate) if (validate)
{ {
if (reload_in_progress || reload_completed) if (reload_in_progress || reload_completed)
{ gcc_assert (memory_address_p (mode, addr));
if (! memory_address_p (mode, addr))
abort ();
}
else else
addr = memory_address (mode, addr); addr = memory_address (mode, addr);
} }
...@@ -2285,7 +2267,7 @@ verify_rtx_sharing (rtx orig, rtx insn) ...@@ -2285,7 +2267,7 @@ verify_rtx_sharing (rtx orig, rtx insn)
debug_rtx (insn); debug_rtx (insn);
error ("Shared rtx"); error ("Shared rtx");
debug_rtx (x); debug_rtx (x);
abort (); fatal_error ("Internal consistency failure");
} }
RTX_FLAG (x, used) = 1; RTX_FLAG (x, used) = 1;
...@@ -2490,7 +2472,7 @@ copy_most_rtx (rtx orig, rtx may_share) ...@@ -2490,7 +2472,7 @@ copy_most_rtx (rtx orig, rtx may_share)
break; break;
default: default:
abort (); gcc_unreachable ();
} }
} }
return copy; return copy;
...@@ -2822,8 +2804,7 @@ get_insns (void) ...@@ -2822,8 +2804,7 @@ get_insns (void)
void void
set_first_insn (rtx insn) set_first_insn (rtx insn)
{ {
if (PREV_INSN (insn) != 0) gcc_assert (!PREV_INSN (insn));
abort ();
first_insn = insn; first_insn = insn;
} }
...@@ -2840,8 +2821,7 @@ get_last_insn (void) ...@@ -2840,8 +2821,7 @@ get_last_insn (void)
void void
set_last_insn (rtx insn) set_last_insn (rtx insn)
{ {
if (NEXT_INSN (insn) != 0) gcc_assert (!NEXT_INSN (insn));
abort ();
last_insn = insn; last_insn = insn;
} }
...@@ -3191,8 +3171,7 @@ prev_cc0_setter (rtx insn) ...@@ -3191,8 +3171,7 @@ prev_cc0_setter (rtx insn)
return XEXP (note, 0); return XEXP (note, 0);
insn = prev_nonnote_insn (insn); insn = prev_nonnote_insn (insn);
if (! sets_cc0_p (PATTERN (insn))) gcc_assert (sets_cc0_p (PATTERN (insn)));
abort ();
return insn; return insn;
} }
...@@ -3292,8 +3271,7 @@ try_split (rtx pat, rtx trial, int last) ...@@ -3292,8 +3271,7 @@ try_split (rtx pat, rtx trial, int last)
one jump is created, otherwise the machine description one jump is created, otherwise the machine description
is responsible for this step using is responsible for this step using
split_branch_probability variable. */ split_branch_probability variable. */
if (njumps != 1) gcc_assert (njumps == 1);
abort ();
REG_NOTES (insn) REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_BR_PROB, = gen_rtx_EXPR_LIST (REG_BR_PROB,
GEN_INT (probability), GEN_INT (probability),
...@@ -3510,8 +3488,7 @@ add_insn_after (rtx insn, rtx after) ...@@ -3510,8 +3488,7 @@ add_insn_after (rtx insn, rtx after)
rtx next = NEXT_INSN (after); rtx next = NEXT_INSN (after);
basic_block bb; basic_block bb;
if (optimize && INSN_DELETED_P (after)) gcc_assert (!optimize || !INSN_DELETED_P (after));
abort ();
NEXT_INSN (insn) = next; NEXT_INSN (insn) = next;
PREV_INSN (insn) = after; PREV_INSN (insn) = after;
...@@ -3535,8 +3512,7 @@ add_insn_after (rtx insn, rtx after) ...@@ -3535,8 +3512,7 @@ add_insn_after (rtx insn, rtx after)
break; break;
} }
if (stack == 0) gcc_assert (stack);
abort ();
} }
if (!BARRIER_P (after) if (!BARRIER_P (after)
...@@ -3575,8 +3551,7 @@ add_insn_before (rtx insn, rtx before) ...@@ -3575,8 +3551,7 @@ add_insn_before (rtx insn, rtx before)
rtx prev = PREV_INSN (before); rtx prev = PREV_INSN (before);
basic_block bb; basic_block bb;
if (optimize && INSN_DELETED_P (before)) gcc_assert (!optimize || !INSN_DELETED_P (before));
abort ();
PREV_INSN (insn) = prev; PREV_INSN (insn) = prev;
NEXT_INSN (insn) = before; NEXT_INSN (insn) = before;
...@@ -3603,8 +3578,7 @@ add_insn_before (rtx insn, rtx before) ...@@ -3603,8 +3578,7 @@ add_insn_before (rtx insn, rtx before)
break; break;
} }
if (stack == 0) gcc_assert (stack);
abort ();
} }
if (!BARRIER_P (before) if (!BARRIER_P (before)
...@@ -3614,14 +3588,13 @@ add_insn_before (rtx insn, rtx before) ...@@ -3614,14 +3588,13 @@ add_insn_before (rtx insn, rtx before)
set_block_for_insn (insn, bb); set_block_for_insn (insn, bb);
if (INSN_P (insn)) if (INSN_P (insn))
bb->flags |= BB_DIRTY; bb->flags |= BB_DIRTY;
/* Should not happen as first in the BB is always /* Should not happen as first in the BB is always either NOTE or
either NOTE or LABEl. */ LABEl. */
if (BB_HEAD (bb) == insn gcc_assert (BB_HEAD (bb) != insn
/* Avoid clobbering of structure when creating new BB. */ /* Avoid clobbering of structure when creating new BB. */
&& !BARRIER_P (insn) || BARRIER_P (insn)
&& (!NOTE_P (insn) || (NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)) && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
abort ();
} }
PREV_INSN (before) = insn; PREV_INSN (before) = insn;
...@@ -3660,8 +3633,7 @@ remove_insn (rtx insn) ...@@ -3660,8 +3633,7 @@ remove_insn (rtx insn)
break; break;
} }
if (stack == 0) gcc_assert (stack);
abort ();
} }
if (next) if (next)
...@@ -3683,8 +3655,7 @@ remove_insn (rtx insn) ...@@ -3683,8 +3655,7 @@ remove_insn (rtx insn)
break; break;
} }
if (stack == 0) gcc_assert (stack);
abort ();
} }
if (!BARRIER_P (insn) if (!BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (insn))) && (bb = BLOCK_FOR_INSN (insn)))
...@@ -3695,8 +3666,7 @@ remove_insn (rtx insn) ...@@ -3695,8 +3666,7 @@ remove_insn (rtx insn)
{ {
/* Never ever delete the basic block note without deleting whole /* Never ever delete the basic block note without deleting whole
basic block. */ basic block. */
if (NOTE_P (insn)) gcc_assert (!NOTE_P (insn));
abort ();
BB_HEAD (bb) = next; BB_HEAD (bb) = next;
} }
if (BB_END (bb) == insn) if (BB_END (bb) == insn)
...@@ -3709,8 +3679,7 @@ remove_insn (rtx insn) ...@@ -3709,8 +3679,7 @@ remove_insn (rtx insn)
void void
add_function_usage_to (rtx call_insn, rtx call_fusage) add_function_usage_to (rtx call_insn, rtx call_fusage)
{ {
if (! call_insn || !CALL_P (call_insn)) gcc_assert (call_insn && CALL_P (call_insn));
abort ();
/* Put the register usage information on the CALL. If there is already /* Put the register usage information on the CALL. If there is already
some usage information, put ours at the end. */ some usage information, put ours at the end. */
...@@ -3857,11 +3826,10 @@ remove_unnecessary_notes (void) ...@@ -3857,11 +3826,10 @@ remove_unnecessary_notes (void)
case NOTE_INSN_EH_REGION_END: case NOTE_INSN_EH_REGION_END:
/* Too many end notes. */ /* Too many end notes. */
if (eh_stack == NULL_RTX) gcc_assert (eh_stack);
abort ();
/* Mismatched nesting. */ /* Mismatched nesting. */
if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn)) gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
abort (); == NOTE_EH_HANDLER (insn));
tmp = eh_stack; tmp = eh_stack;
eh_stack = XEXP (eh_stack, 1); eh_stack = XEXP (eh_stack, 1);
free_INSN_LIST_node (tmp); free_INSN_LIST_node (tmp);
...@@ -3870,18 +3838,15 @@ remove_unnecessary_notes (void) ...@@ -3870,18 +3838,15 @@ remove_unnecessary_notes (void)
case NOTE_INSN_BLOCK_BEG: case NOTE_INSN_BLOCK_BEG:
/* By now, all notes indicating lexical blocks should have /* By now, all notes indicating lexical blocks should have
NOTE_BLOCK filled in. */ NOTE_BLOCK filled in. */
if (NOTE_BLOCK (insn) == NULL_TREE) gcc_assert (NOTE_BLOCK (insn));
abort ();
block_stack = alloc_INSN_LIST (insn, block_stack); block_stack = alloc_INSN_LIST (insn, block_stack);
break; break;
case NOTE_INSN_BLOCK_END: case NOTE_INSN_BLOCK_END:
/* Too many end notes. */ /* Too many end notes. */
if (block_stack == NULL_RTX) gcc_assert (block_stack);
abort ();
/* Mismatched nesting. */ /* Mismatched nesting. */
if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn)) gcc_assert (NOTE_BLOCK (XEXP (block_stack, 0)) == NOTE_BLOCK (insn));
abort ();
tmp = block_stack; tmp = block_stack;
block_stack = XEXP (block_stack, 1); block_stack = XEXP (block_stack, 1);
free_INSN_LIST_node (tmp); free_INSN_LIST_node (tmp);
...@@ -3930,8 +3895,7 @@ remove_unnecessary_notes (void) ...@@ -3930,8 +3895,7 @@ remove_unnecessary_notes (void)
} }
/* Too many begin notes. */ /* Too many begin notes. */
if (block_stack || eh_stack) gcc_assert (!block_stack && !eh_stack);
abort ();
} }
...@@ -3968,10 +3932,7 @@ emit_insn_before (rtx x, rtx before) ...@@ -3968,10 +3932,7 @@ emit_insn_before (rtx x, rtx before)
rtx last = before; rtx last = before;
rtx insn; rtx insn;
#ifdef ENABLE_RTL_CHECKING gcc_assert (before);
if (before == NULL_RTX)
abort ();
#endif
if (x == NULL_RTX) if (x == NULL_RTX)
return last; return last;
...@@ -3996,7 +3957,7 @@ emit_insn_before (rtx x, rtx before) ...@@ -3996,7 +3957,7 @@ emit_insn_before (rtx x, rtx before)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4017,10 +3978,7 @@ emit_jump_insn_before (rtx x, rtx before) ...@@ -4017,10 +3978,7 @@ emit_jump_insn_before (rtx x, rtx before)
{ {
rtx insn, last = NULL_RTX; rtx insn, last = NULL_RTX;
#ifdef ENABLE_RTL_CHECKING gcc_assert (before);
if (before == NULL_RTX)
abort ();
#endif
switch (GET_CODE (x)) switch (GET_CODE (x))
{ {
...@@ -4042,7 +4000,7 @@ emit_jump_insn_before (rtx x, rtx before) ...@@ -4042,7 +4000,7 @@ emit_jump_insn_before (rtx x, rtx before)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4063,10 +4021,7 @@ emit_call_insn_before (rtx x, rtx before) ...@@ -4063,10 +4021,7 @@ emit_call_insn_before (rtx x, rtx before)
{ {
rtx last = NULL_RTX, insn; rtx last = NULL_RTX, insn;
#ifdef ENABLE_RTL_CHECKING gcc_assert (before);
if (before == NULL_RTX)
abort ();
#endif
switch (GET_CODE (x)) switch (GET_CODE (x))
{ {
...@@ -4088,7 +4043,7 @@ emit_call_insn_before (rtx x, rtx before) ...@@ -4088,7 +4043,7 @@ emit_call_insn_before (rtx x, rtx before)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4196,10 +4151,7 @@ emit_insn_after (rtx x, rtx after) ...@@ -4196,10 +4151,7 @@ emit_insn_after (rtx x, rtx after)
{ {
rtx last = after; rtx last = after;
#ifdef ENABLE_RTL_CHECKING gcc_assert (after);
if (after == NULL_RTX)
abort ();
#endif
if (x == NULL_RTX) if (x == NULL_RTX)
return last; return last;
...@@ -4217,7 +4169,7 @@ emit_insn_after (rtx x, rtx after) ...@@ -4217,7 +4169,7 @@ emit_insn_after (rtx x, rtx after)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4255,10 +4207,7 @@ emit_jump_insn_after (rtx x, rtx after) ...@@ -4255,10 +4207,7 @@ emit_jump_insn_after (rtx x, rtx after)
{ {
rtx last; rtx last;
#ifdef ENABLE_RTL_CHECKING gcc_assert (after);
if (after == NULL_RTX)
abort ();
#endif
switch (GET_CODE (x)) switch (GET_CODE (x))
{ {
...@@ -4273,7 +4222,7 @@ emit_jump_insn_after (rtx x, rtx after) ...@@ -4273,7 +4222,7 @@ emit_jump_insn_after (rtx x, rtx after)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4294,10 +4243,7 @@ emit_call_insn_after (rtx x, rtx after) ...@@ -4294,10 +4243,7 @@ emit_call_insn_after (rtx x, rtx after)
{ {
rtx last; rtx last;
#ifdef ENABLE_RTL_CHECKING gcc_assert (after);
if (after == NULL_RTX)
abort ();
#endif
switch (GET_CODE (x)) switch (GET_CODE (x))
{ {
...@@ -4312,7 +4258,7 @@ emit_call_insn_after (rtx x, rtx after) ...@@ -4312,7 +4258,7 @@ emit_call_insn_after (rtx x, rtx after)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4513,7 +4459,7 @@ emit_insn (rtx x) ...@@ -4513,7 +4459,7 @@ emit_insn (rtx x)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4554,7 +4500,7 @@ emit_jump_insn (rtx x) ...@@ -4554,7 +4500,7 @@ emit_jump_insn (rtx x)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4588,7 +4534,7 @@ emit_call_insn (rtx x) ...@@ -4588,7 +4534,7 @@ emit_call_insn (rtx x)
#ifdef ENABLE_RTL_CHECKING #ifdef ENABLE_RTL_CHECKING
case SEQUENCE: case SEQUENCE:
abort (); gcc_unreachable ();
break; break;
#endif #endif
...@@ -4737,8 +4683,7 @@ set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum) ...@@ -4737,8 +4683,7 @@ set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
means the insn only has one * useful * set). */ means the insn only has one * useful * set). */
if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
{ {
if (note) gcc_assert (!note);
abort ();
return NULL_RTX; return NULL_RTX;
} }
...@@ -4807,21 +4752,24 @@ emit (rtx x) ...@@ -4807,21 +4752,24 @@ emit (rtx x)
{ {
enum rtx_code code = classify_insn (x); enum rtx_code code = classify_insn (x);
if (code == CODE_LABEL) switch (code)
return emit_label (x);
else if (code == INSN)
return emit_insn (x);
else if (code == JUMP_INSN)
{ {
rtx insn = emit_jump_insn (x); case CODE_LABEL:
if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN) return emit_label (x);
return emit_barrier (); case INSN:
return insn; return emit_insn (x);
case JUMP_INSN:
{
rtx insn = emit_jump_insn (x);
if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
return emit_barrier ();
return insn;
}
case CALL_INSN:
return emit_call_insn (x);
default:
gcc_unreachable ();
} }
else if (code == CALL_INSN)
return emit_call_insn (x);
else
abort ();
} }
/* Space for free sequence stack entries. */ /* Space for free sequence stack entries. */
...@@ -4883,8 +4831,7 @@ push_to_full_sequence (rtx first, rtx last) ...@@ -4883,8 +4831,7 @@ push_to_full_sequence (rtx first, rtx last)
first_insn = first; first_insn = first;
last_insn = last; last_insn = last;
/* We really should have the end of the insn chain here. */ /* We really should have the end of the insn chain here. */
if (last && NEXT_INSN (last)) gcc_assert (!last || !NEXT_INSN (last));
abort ();
} }
/* Set up the outer-level insn chain /* Set up the outer-level insn chain
...@@ -5104,15 +5051,14 @@ copy_insn_1 (rtx orig) ...@@ -5104,15 +5051,14 @@ copy_insn_1 (rtx orig)
break; break;
default: default:
abort (); gcc_unreachable ();
} }
} }
if (code == SCRATCH) if (code == SCRATCH)
{ {
i = copy_insn_n_scratches++; i = copy_insn_n_scratches++;
if (i >= MAX_RECOG_OPERANDS) gcc_assert (i < MAX_RECOG_OPERANDS);
abort ();
copy_insn_scratch_in[i] = orig; copy_insn_scratch_in[i] = orig;
copy_insn_scratch_out[i] = copy; copy_insn_scratch_out[i] = copy;
} }
...@@ -5227,8 +5173,7 @@ gen_const_vector_0 (enum machine_mode mode) ...@@ -5227,8 +5173,7 @@ gen_const_vector_0 (enum machine_mode mode)
v = rtvec_alloc (units); v = rtvec_alloc (units);
/* We need to call this function after we to set CONST0_RTX first. */ /* We need to call this function after we to set CONST0_RTX first. */
if (!CONST0_RTX (inner)) gcc_assert (CONST0_RTX (inner));
abort ();
for (i = 0; i < units; ++i) for (i = 0; i < units; ++i)
RTVEC_ELT (v, i) = CONST0_RTX (inner); RTVEC_ELT (v, i) = CONST0_RTX (inner);
...@@ -5477,7 +5422,7 @@ emit_copy_of_insn_after (rtx insn, rtx after) ...@@ -5477,7 +5422,7 @@ emit_copy_of_insn_after (rtx insn, rtx after)
break; break;
default: default:
abort (); gcc_unreachable ();
} }
/* Update LABEL_NUSES. */ /* Update LABEL_NUSES. */
......
...@@ -88,8 +88,7 @@ static inline void ...@@ -88,8 +88,7 @@ static inline void
set_prev (struct et_occ *occ, struct et_occ *t) set_prev (struct et_occ *occ, struct et_occ *t)
{ {
#ifdef DEBUG_ET #ifdef DEBUG_ET
if (occ == t) gcc_assert (occ != t);
abort ();
#endif #endif
occ->prev = t; occ->prev = t;
...@@ -103,8 +102,7 @@ static inline void ...@@ -103,8 +102,7 @@ static inline void
set_next (struct et_occ *occ, struct et_occ *t) set_next (struct et_occ *occ, struct et_occ *t)
{ {
#ifdef DEBUG_ET #ifdef DEBUG_ET
if (occ == t) gcc_assert (occ != t);
abort ();
#endif #endif
occ->next = t; occ->next = t;
...@@ -145,40 +143,26 @@ et_check_occ_sanity (struct et_occ *occ) ...@@ -145,40 +143,26 @@ et_check_occ_sanity (struct et_occ *occ)
if (!occ) if (!occ)
return; return;
if (occ->parent == occ) gcc_assert (occ->parent != occ);
abort (); gcc_assert (occ->prev != occ);
gcc_assert (occ->next != occ);
if (occ->prev == occ) gcc_assert (!occ->next || occ->next != occ->prev);
abort ();
if (occ->next == occ)
abort ();
if (occ->next && occ->next == occ->prev)
abort ();
if (occ->next) if (occ->next)
{ {
if (occ->next == occ->parent) gcc_assert (occ->next != occ->parent);
abort (); gcc_assert (occ->next->parent == occ);
if (occ->next->parent != occ)
abort ();
} }
if (occ->prev) if (occ->prev)
{ {
if (occ->prev == occ->parent) gcc_assert (occ->prev != occ->parent);
abort (); gcc_assert (occ->prev->parent == occ);
if (occ->prev->parent != occ)
abort ();
} }
if (occ->parent gcc_assert (!occ->parent
&& occ->parent->prev != occ || occ->parent->prev == occ
&& occ->parent->next != occ) || occ->parent->next == occ);
abort ();
} }
/* Checks whether tree rooted at OCC is sane. */ /* Checks whether tree rooted at OCC is sane. */
...@@ -233,8 +217,7 @@ record_path_before_1 (struct et_occ *occ, int depth) ...@@ -233,8 +217,7 @@ record_path_before_1 (struct et_occ *occ, int depth)
fprintf (stderr, "%d (%d); ", ((basic_block) occ->of->data)->index, depth); fprintf (stderr, "%d (%d); ", ((basic_block) occ->of->data)->index, depth);
if (len >= MAX_NODES) gcc_assert (len < MAX_NODES);
abort ();
depths[len] = depth; depths[len] = depth;
datas[len] = occ->of; datas[len] = occ->of;
...@@ -247,8 +230,7 @@ record_path_before_1 (struct et_occ *occ, int depth) ...@@ -247,8 +230,7 @@ record_path_before_1 (struct et_occ *occ, int depth)
mn = m; mn = m;
} }
if (mn != occ->min + depth - occ->depth) gcc_assert (mn == occ->min + depth - occ->depth);
abort ();
return mn; return mn;
} }
...@@ -285,9 +267,7 @@ check_path_after_1 (struct et_occ *occ, int depth) ...@@ -285,9 +267,7 @@ check_path_after_1 (struct et_occ *occ, int depth)
} }
len--; len--;
if (depths[len] != depth gcc_assert (depths[len] == depth && datas[len] == occ->of);
|| datas[len] != occ->of)
abort ();
if (occ->prev) if (occ->prev)
{ {
...@@ -296,8 +276,7 @@ check_path_after_1 (struct et_occ *occ, int depth) ...@@ -296,8 +276,7 @@ check_path_after_1 (struct et_occ *occ, int depth)
mn = m; mn = m;
} }
if (mn != occ->min + depth - occ->depth) gcc_assert (mn == occ->min + depth - occ->depth);
abort ();
return mn; return mn;
} }
...@@ -312,8 +291,7 @@ check_path_after (struct et_occ *occ) ...@@ -312,8 +291,7 @@ check_path_after (struct et_occ *occ)
occ = occ->parent; occ = occ->parent;
check_path_after_1 (occ, 0); check_path_after_1 (occ, 0);
if (len != 0) gcc_assert (!len);
abort ();
} }
#endif #endif
......
...@@ -460,8 +460,7 @@ gen_eh_region (enum eh_region_type type, struct eh_region *outer) ...@@ -460,8 +460,7 @@ gen_eh_region (enum eh_region_type type, struct eh_region *outer)
struct eh_region *new; struct eh_region *new;
#ifdef ENABLE_CHECKING #ifdef ENABLE_CHECKING
if (! doing_eh (0)) gcc_assert (doing_eh (0));
abort ();
#endif #endif
/* Insert a new blank region as a leaf in the tree. */ /* Insert a new blank region as a leaf in the tree. */
...@@ -688,8 +687,7 @@ resolve_one_fixup_region (struct eh_region *fixup) ...@@ -688,8 +687,7 @@ resolve_one_fixup_region (struct eh_region *fixup)
&& cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp) && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
break; break;
} }
if (j > n) gcc_assert (j <= n);
abort ();
real = cleanup->outer; real = cleanup->outer;
if (real && real->type == ERT_FIXUP) if (real && real->type == ERT_FIXUP)
...@@ -811,14 +809,12 @@ remove_unreachable_regions (rtx insns) ...@@ -811,14 +809,12 @@ remove_unreachable_regions (rtx insns)
if (r->resume) if (r->resume)
{ {
if (uid_region_num[INSN_UID (r->resume)]) gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
abort ();
uid_region_num[INSN_UID (r->resume)] = i; uid_region_num[INSN_UID (r->resume)] = i;
} }
if (r->label) if (r->label)
{ {
if (uid_region_num[INSN_UID (r->label)]) gcc_assert (!uid_region_num[INSN_UID (r->label)]);
abort ();
uid_region_num[INSN_UID (r->label)] = i; uid_region_num[INSN_UID (r->label)] = i;
} }
} }
...@@ -942,8 +938,7 @@ convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur) ...@@ -942,8 +938,7 @@ convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
} }
} }
if (sp != orig_sp) gcc_assert (sp == orig_sp);
abort ();
} }
static void static void
...@@ -1006,8 +1001,7 @@ add_ehl_entry (rtx label, struct eh_region *region) ...@@ -1006,8 +1001,7 @@ add_ehl_entry (rtx label, struct eh_region *region)
label. After landing pad creation, the exception handlers may label. After landing pad creation, the exception handlers may
share landing pads. This is ok, since maybe_remove_eh_handler share landing pads. This is ok, since maybe_remove_eh_handler
only requires the 1-1 mapping before landing pad creation. */ only requires the 1-1 mapping before landing pad creation. */
if (*slot && !cfun->eh->built_landing_pads) gcc_assert (!*slot || cfun->eh->built_landing_pads);
abort ();
*slot = entry; *slot = entry;
} }
...@@ -1104,7 +1098,7 @@ duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map) ...@@ -1104,7 +1098,7 @@ duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
n->u.throw.type = o->u.throw.type; n->u.throw.type = o->u.throw.type;
default: default:
abort (); gcc_unreachable ();
} }
if (o->label) if (o->label)
...@@ -1112,8 +1106,7 @@ duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map) ...@@ -1112,8 +1106,7 @@ duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
if (o->resume) if (o->resume)
{ {
n->resume = map->insn_map[INSN_UID (o->resume)]; n->resume = map->insn_map[INSN_UID (o->resume)];
if (n->resume == NULL) gcc_assert (n->resume);
abort ();
} }
return n; return n;
...@@ -1586,7 +1579,7 @@ build_post_landing_pads (void) ...@@ -1586,7 +1579,7 @@ build_post_landing_pads (void)
break; break;
default: default:
abort (); gcc_unreachable ();
} }
} }
} }
...@@ -1657,8 +1650,7 @@ connect_post_landing_pads (void) ...@@ -1657,8 +1650,7 @@ connect_post_landing_pads (void)
end_sequence (); end_sequence ();
barrier = emit_insn_before (seq, region->resume); barrier = emit_insn_before (seq, region->resume);
/* Avoid duplicate barrier. */ /* Avoid duplicate barrier. */
if (!BARRIER_P (barrier)) gcc_assert (BARRIER_P (barrier));
abort ();
delete_insn (barrier); delete_insn (barrier);
delete_insn (region->resume); delete_insn (region->resume);
...@@ -2049,8 +2041,7 @@ sjlj_emit_function_exit (void) ...@@ -2049,8 +2041,7 @@ sjlj_emit_function_exit (void)
/* Figure out whether the place we are supposed to insert libcall /* Figure out whether the place we are supposed to insert libcall
is inside the last basic block or after it. In the other case is inside the last basic block or after it. In the other case
we need to emit to edge. */ we need to emit to edge. */
if (e->src->next_bb != EXIT_BLOCK_PTR) gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
abort ();
for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn)) for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
if (insn == cfun->eh->sjlj_exit_after) if (insn == cfun->eh->sjlj_exit_after)
break; break;
...@@ -2258,8 +2249,7 @@ remove_exception_handler_label (rtx label) ...@@ -2258,8 +2249,7 @@ remove_exception_handler_label (rtx label)
tmp.label = label; tmp.label = label;
slot = (struct ehl_map_entry **) slot = (struct ehl_map_entry **)
htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT); htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
if (! slot) gcc_assert (slot);
abort ();
htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot); htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
} }
...@@ -2330,8 +2320,7 @@ remove_eh_handler (struct eh_region *region) ...@@ -2330,8 +2320,7 @@ remove_eh_handler (struct eh_region *region)
try->type == ERT_CATCH; try->type == ERT_CATCH;
try = try->next_peer) try = try->next_peer)
continue; continue;
if (try->type != ERT_TRY) gcc_assert (try->type == ERT_TRY);
abort ();
next = region->u.catch.next_catch; next = region->u.catch.next_catch;
prev = region->u.catch.prev_catch; prev = region->u.catch.prev_catch;
...@@ -2642,10 +2631,11 @@ reachable_next_level (struct eh_region *region, tree type_thrown, ...@@ -2642,10 +2631,11 @@ reachable_next_level (struct eh_region *region, tree type_thrown,
case ERT_FIXUP: case ERT_FIXUP:
case ERT_UNKNOWN: case ERT_UNKNOWN:
/* Shouldn't see these here. */ /* Shouldn't see these here. */
gcc_unreachable ();
break; break;
default:
gcc_unreachable ();
} }
abort ();
} }
/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */ /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
...@@ -3259,7 +3249,7 @@ collect_one_action_chain (htab_t ar_hash, struct eh_region *region) ...@@ -3259,7 +3249,7 @@ collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
return collect_one_action_chain (ar_hash, region->outer); return collect_one_action_chain (ar_hash, region->outer);
default: default:
abort (); gcc_unreachable ();
} }
} }
...@@ -3763,8 +3753,8 @@ output_function_exception_table (void) ...@@ -3763,8 +3753,8 @@ output_function_exception_table (void)
cgraph_varpool_mark_needed_node (node); cgraph_varpool_mark_needed_node (node);
} }
} }
else if (TREE_CODE (type) != INTEGER_CST) else
abort (); gcc_assert (TREE_CODE (type) == INTEGER_CST);
} }
if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned) if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
......
...@@ -50,8 +50,7 @@ trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode) ...@@ -50,8 +50,7 @@ trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
int width = GET_MODE_BITSIZE (mode); int width = GET_MODE_BITSIZE (mode);
/* You want to truncate to a _what_? */ /* You want to truncate to a _what_? */
if (! SCALAR_INT_MODE_P (mode)) gcc_assert (SCALAR_INT_MODE_P (mode));
abort ();
/* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */ /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
if (mode == BImode) if (mode == BImode)
...@@ -633,8 +632,7 @@ copy_to_mode_reg (enum machine_mode mode, rtx x) ...@@ -633,8 +632,7 @@ copy_to_mode_reg (enum machine_mode mode, rtx x)
if (! general_operand (x, VOIDmode)) if (! general_operand (x, VOIDmode))
x = force_operand (x, temp); x = force_operand (x, temp);
if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode) gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
abort ();
if (x != temp) if (x != temp)
emit_move_insn (temp, x); emit_move_insn (temp, x);
return temp; return temp;
...@@ -1106,11 +1104,10 @@ optimize_save_area_alloca (void) ...@@ -1106,11 +1104,10 @@ optimize_save_area_alloca (void)
Right now only supported port with stack that grow upward Right now only supported port with stack that grow upward
is the HPPA and it does not define SETJMP_VIA_SAVE_AREA. */ is the HPPA and it does not define SETJMP_VIA_SAVE_AREA. */
if (GET_CODE (pat) != SET gcc_assert (GET_CODE (pat) == SET
|| SET_DEST (pat) != stack_pointer_rtx && SET_DEST (pat) == stack_pointer_rtx
|| GET_CODE (SET_SRC (pat)) != MINUS && GET_CODE (SET_SRC (pat)) == MINUS
|| XEXP (SET_SRC (pat), 0) != stack_pointer_rtx) && XEXP (SET_SRC (pat), 0) == stack_pointer_rtx);
abort ();
/* This will now be transformed into a (set REG REG) /* This will now be transformed into a (set REG REG)
so we can just blow away all the other notes. */ so we can just blow away all the other notes. */
...@@ -1134,8 +1131,7 @@ optimize_save_area_alloca (void) ...@@ -1134,8 +1131,7 @@ optimize_save_area_alloca (void)
if (XEXP (srch, 1) == note) if (XEXP (srch, 1) == note)
break; break;
if (srch == NULL_RTX) gcc_assert (srch);
abort ();
XEXP (srch, 1) = XEXP (note, 1); XEXP (srch, 1) = XEXP (note, 1);
} }
...@@ -1229,8 +1225,7 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align) ...@@ -1229,8 +1225,7 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
/* ??? Code below assumes that the save area needs maximal /* ??? Code below assumes that the save area needs maximal
alignment. This constraint may be too strong. */ alignment. This constraint may be too strong. */
if (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT) gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);
abort ();
if (GET_CODE (size) == CONST_INT) if (GET_CODE (size) == CONST_INT)
{ {
...@@ -1287,8 +1282,8 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align) ...@@ -1287,8 +1282,8 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
/* We ought to be called always on the toplevel and stack ought to be aligned /* We ought to be called always on the toplevel and stack ought to be aligned
properly. */ properly. */
if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)) gcc_assert (!(stack_pointer_delta
abort (); % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
/* If needed, check that we have the required amount of stack. Take into /* If needed, check that we have the required amount of stack. Take into
account what has already been checked. */ account what has already been checked. */
...@@ -1539,8 +1534,7 @@ probe_stack_range (HOST_WIDE_INT first, rtx size) ...@@ -1539,8 +1534,7 @@ probe_stack_range (HOST_WIDE_INT first, rtx size)
1, OPTAB_WIDEN); 1, OPTAB_WIDEN);
#endif #endif
if (temp != test_addr) gcc_assert (temp == test_addr);
abort ();
emit_label (test_lab); emit_label (test_lab);
emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE, emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
...@@ -1594,8 +1588,7 @@ hard_function_value (tree valtype, tree func ATTRIBUTE_UNUSED, ...@@ -1594,8 +1588,7 @@ hard_function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
} }
/* No suitable mode found. */ /* No suitable mode found. */
if (tmpmode == VOIDmode) gcc_assert (tmpmode != VOIDmode);
abort ();
PUT_MODE (val, tmpmode); PUT_MODE (val, tmpmode);
} }
......
...@@ -298,7 +298,7 @@ mode_for_extraction (enum extraction_pattern pattern, int opno) ...@@ -298,7 +298,7 @@ mode_for_extraction (enum extraction_pattern pattern, int opno)
return MAX_MACHINE_MODE; return MAX_MACHINE_MODE;
default: default:
abort (); gcc_unreachable ();
} }
if (opno == -1) if (opno == -1)
...@@ -386,10 +386,9 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -386,10 +386,9 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
/* We could handle this, but we should always be called with a pseudo /* We could handle this, but we should always be called with a pseudo
for our targets and all insns should take them as outputs. */ for our targets and all insns should take them as outputs. */
if (! (*insn_data[icode].operand[0].predicate) (dest, mode0) gcc_assert ((*insn_data[icode].operand[0].predicate) (dest, mode0)
|| ! (*insn_data[icode].operand[1].predicate) (src, mode1) && (*insn_data[icode].operand[1].predicate) (src, mode1)
|| ! (*insn_data[icode].operand[2].predicate) (rtxpos, mode2)) && (*insn_data[icode].operand[2].predicate) (rtxpos, mode2));
abort ();
pat = GEN_FCN (icode) (dest, src, rtxpos); pat = GEN_FCN (icode) (dest, src, rtxpos);
seq = get_insns (); seq = get_insns ();
end_sequence (); end_sequence ();
...@@ -433,15 +432,14 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -433,15 +432,14 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
{ {
if (GET_CODE (op0) == SUBREG) if (GET_CODE (op0) == SUBREG)
{ {
if (GET_MODE (SUBREG_REG (op0)) == fieldmode /* Else we've got some float mode source being extracted
|| GET_MODE_CLASS (fieldmode) == MODE_INT into a different float mode destination -- this
|| GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT) combination of subregs results in Severe Tire
op0 = SUBREG_REG (op0); Damage. */
else gcc_assert (GET_MODE (SUBREG_REG (op0)) == fieldmode
/* Else we've got some float mode source being extracted into || GET_MODE_CLASS (fieldmode) == MODE_INT
a different float mode destination -- this combination of || GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT);
subregs results in Severe Tire Damage. */ op0 = SUBREG_REG (op0);
abort ();
} }
if (REG_P (op0)) if (REG_P (op0))
op0 = gen_rtx_SUBREG (fieldmode, op0, byte_offset); op0 = gen_rtx_SUBREG (fieldmode, op0, byte_offset);
...@@ -462,10 +460,11 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -462,10 +460,11 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
{ {
if (MEM_P (op0)) if (MEM_P (op0))
op0 = adjust_address (op0, imode, 0); op0 = adjust_address (op0, imode, 0);
else if (imode != BLKmode)
op0 = gen_lowpart (imode, op0);
else else
abort (); {
gcc_assert (imode != BLKmode);
op0 = gen_lowpart (imode, op0);
}
} }
} }
...@@ -510,15 +509,13 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -510,15 +509,13 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
if (GET_CODE (op0) == SUBREG) if (GET_CODE (op0) == SUBREG)
{ {
if (GET_MODE (SUBREG_REG (op0)) == fieldmode /* Else we've got some float mode source being extracted into
|| GET_MODE_CLASS (fieldmode) == MODE_INT a different float mode destination -- this combination of
|| GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT) subregs results in Severe Tire Damage. */
op0 = SUBREG_REG (op0); gcc_assert (GET_MODE (SUBREG_REG (op0)) == fieldmode
else || GET_MODE_CLASS (fieldmode) == MODE_INT
/* Else we've got some float mode source being extracted into || GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT);
a different float mode destination -- this combination of op0 = SUBREG_REG (op0);
subregs results in Severe Tire Damage. */
abort ();
} }
emit_insn (GEN_FCN (icode) emit_insn (GEN_FCN (icode)
...@@ -589,12 +586,10 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -589,12 +586,10 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
pseudo. We can trivially remove a SUBREG that does not pseudo. We can trivially remove a SUBREG that does not
change the size of the operand. Such a SUBREG may have been change the size of the operand. Such a SUBREG may have been
added above. Otherwise, abort. */ added above. Otherwise, abort. */
if (GET_CODE (op0) == SUBREG gcc_assert (GET_CODE (op0) == SUBREG
&& (GET_MODE_SIZE (GET_MODE (op0)) && (GET_MODE_SIZE (GET_MODE (op0))
== GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))) == GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))));
op0 = SUBREG_REG (op0); op0 = SUBREG_REG (op0);
else
abort ();
} }
op0 = gen_rtx_SUBREG (mode_for_size (BITS_PER_WORD, MODE_INT, 0), op0 = gen_rtx_SUBREG (mode_for_size (BITS_PER_WORD, MODE_INT, 0),
op0, (offset * UNITS_PER_WORD)); op0, (offset * UNITS_PER_WORD));
...@@ -731,12 +726,12 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -731,12 +726,12 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
} }
else if (GET_CODE (value) == CONST_INT) else if (GET_CODE (value) == CONST_INT)
value1 = gen_int_mode (INTVAL (value), maxmode); value1 = gen_int_mode (INTVAL (value), maxmode);
else if (!CONSTANT_P (value)) else
/* Parse phase is supposed to make VALUE's data type /* Parse phase is supposed to make VALUE's data type
match that of the component reference, which is a type match that of the component reference, which is a type
at least as wide as the field; so VALUE should have at least as wide as the field; so VALUE should have
a mode that corresponds to that type. */ a mode that corresponds to that type. */
abort (); gcc_assert (CONSTANT_P (value));
} }
/* If this machine's insv insists on a register, /* If this machine's insv insists on a register,
...@@ -790,8 +785,7 @@ store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT offset, ...@@ -790,8 +785,7 @@ store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT offset,
if (REG_P (op0) || GET_CODE (op0) == SUBREG) if (REG_P (op0) || GET_CODE (op0) == SUBREG)
{ {
if (offset != 0) gcc_assert (!offset);
abort ();
/* Special treatment for a bit field split across two registers. */ /* Special treatment for a bit field split across two registers. */
if (bitsize + bitpos > BITS_PER_WORD) if (bitsize + bitpos > BITS_PER_WORD)
{ {
...@@ -1146,10 +1140,9 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -1146,10 +1140,9 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
/* We could handle this, but we should always be called with a pseudo /* We could handle this, but we should always be called with a pseudo
for our targets and all insns should take them as outputs. */ for our targets and all insns should take them as outputs. */
if (! (*insn_data[icode].operand[0].predicate) (dest, mode0) gcc_assert ((*insn_data[icode].operand[0].predicate) (dest, mode0)
|| ! (*insn_data[icode].operand[1].predicate) (src, mode1) && (*insn_data[icode].operand[1].predicate) (src, mode1)
|| ! (*insn_data[icode].operand[2].predicate) (rtxpos, mode2)) && (*insn_data[icode].operand[2].predicate) (rtxpos, mode2));
abort ();
pat = GEN_FCN (icode) (dest, src, rtxpos); pat = GEN_FCN (icode) (dest, src, rtxpos);
seq = get_insns (); seq = get_insns ();
...@@ -1170,10 +1163,11 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -1170,10 +1163,11 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
{ {
if (MEM_P (op0)) if (MEM_P (op0))
op0 = adjust_address (op0, imode, 0); op0 = adjust_address (op0, imode, 0);
else if (imode != BLKmode)
op0 = gen_lowpart (imode, op0);
else else
abort (); {
gcc_assert (imode != BLKmode);
op0 = gen_lowpart (imode, op0);
}
} }
} }
...@@ -1299,8 +1293,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -1299,8 +1293,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
bitnum + bit_offset, 1, target_part, mode, bitnum + bit_offset, 1, target_part, mode,
word_mode); word_mode);
if (target_part == 0) gcc_assert (target_part);
abort ();
if (result_part != target_part) if (result_part != target_part)
emit_move_insn (target_part, result_part); emit_move_insn (target_part, result_part);
...@@ -1346,13 +1339,11 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize, ...@@ -1346,13 +1339,11 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
int_mode = int_mode_for_mode (tmode); int_mode = int_mode_for_mode (tmode);
if (int_mode == BLKmode) if (int_mode == BLKmode)
int_mode = int_mode_for_mode (mode); int_mode = int_mode_for_mode (mode);
if (int_mode == BLKmode) /* Should probably push op0 out to memory and then do a load. */
abort (); /* Should probably push op0 out to memory and then gcc_assert (int_mode != BLKmode);
do a load. */
/* OFFSET is the number of words or bytes (UNIT says which) /* OFFSET is the number of words or bytes (UNIT says which)
from STR_RTX to the first word or byte containing part of the field. */ from STR_RTX to the first word or byte containing part of the field. */
if (!MEM_P (op0)) if (!MEM_P (op0))
{ {
if (offset != 0 if (offset != 0
...@@ -2145,8 +2136,7 @@ expand_shift (enum tree_code code, enum machine_mode mode, rtx shifted, ...@@ -2145,8 +2136,7 @@ expand_shift (enum tree_code code, enum machine_mode mode, rtx shifted,
define_expand for lshrsi3 was added to vax.md. */ define_expand for lshrsi3 was added to vax.md. */
} }
if (temp == 0) gcc_assert (temp);
abort ();
return temp; return temp;
} }
...@@ -2649,7 +2639,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val, ...@@ -2649,7 +2639,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
val_so_far = 1; val_so_far = 1;
} }
else else
abort (); gcc_unreachable ();
for (opno = 1; opno < alg->ops; opno++) for (opno = 1; opno < alg->ops; opno++)
{ {
...@@ -2727,7 +2717,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val, ...@@ -2727,7 +2717,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
break; break;
default: default:
abort (); gcc_unreachable ();
} }
/* Write a REG_EQUAL note on the last insn so that we can cse /* Write a REG_EQUAL note on the last insn so that we can cse
...@@ -2762,8 +2752,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val, ...@@ -2762,8 +2752,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
in the result mode, to avoid sign-/zero-extension confusion. */ in the result mode, to avoid sign-/zero-extension confusion. */
val &= GET_MODE_MASK (mode); val &= GET_MODE_MASK (mode);
val_so_far &= GET_MODE_MASK (mode); val_so_far &= GET_MODE_MASK (mode);
if (val != val_so_far) gcc_assert (val == val_so_far);
abort ();
return accum; return accum;
} }
...@@ -2848,8 +2837,7 @@ expand_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target, ...@@ -2848,8 +2837,7 @@ expand_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target,
&& flag_trapv && (GET_MODE_CLASS(mode) == MODE_INT) && flag_trapv && (GET_MODE_CLASS(mode) == MODE_INT)
? smulv_optab : smul_optab, ? smulv_optab : smul_optab,
op0, op1, target, unsignedp, OPTAB_LIB_WIDEN); op0, op1, target, unsignedp, OPTAB_LIB_WIDEN);
if (op0 == 0) gcc_assert (op0);
abort ();
return op0; return op0;
} }
...@@ -2893,18 +2881,15 @@ choose_multiplier (unsigned HOST_WIDE_INT d, int n, int precision, ...@@ -2893,18 +2881,15 @@ choose_multiplier (unsigned HOST_WIDE_INT d, int n, int precision,
/* lgup = ceil(log2(divisor)); */ /* lgup = ceil(log2(divisor)); */
lgup = ceil_log2 (d); lgup = ceil_log2 (d);
if (lgup > n) gcc_assert (lgup <= n);
abort ();
pow = n + lgup; pow = n + lgup;
pow2 = n + lgup - precision; pow2 = n + lgup - precision;
if (pow == 2 * HOST_BITS_PER_WIDE_INT) /* We could handle this with some effort, but this case is much
{ better handled directly with a scc insn, so rely on caller using
/* We could handle this with some effort, but this case is much better that. */
handled directly with a scc insn, so rely on caller using that. */ gcc_assert (pow != 2 * HOST_BITS_PER_WIDE_INT);
abort ();
}
/* mlow = 2^(N + lgup)/d */ /* mlow = 2^(N + lgup)/d */
if (pow >= HOST_BITS_PER_WIDE_INT) if (pow >= HOST_BITS_PER_WIDE_INT)
...@@ -2928,13 +2913,11 @@ choose_multiplier (unsigned HOST_WIDE_INT d, int n, int precision, ...@@ -2928,13 +2913,11 @@ choose_multiplier (unsigned HOST_WIDE_INT d, int n, int precision,
div_and_round_double (TRUNC_DIV_EXPR, 1, nl, nh, d, (HOST_WIDE_INT) 0, div_and_round_double (TRUNC_DIV_EXPR, 1, nl, nh, d, (HOST_WIDE_INT) 0,
&mhigh_lo, &mhigh_hi, &dummy1, &dummy2); &mhigh_lo, &mhigh_hi, &dummy1, &dummy2);
if (mhigh_hi && nh - d >= d) gcc_assert (!mhigh_hi || nh - d < d);
abort (); gcc_assert (mhigh_hi <= 1 && mlow_hi <= 1);
if (mhigh_hi > 1 || mlow_hi > 1)
abort ();
/* Assert that mlow < mhigh. */ /* Assert that mlow < mhigh. */
if (! (mlow_hi < mhigh_hi || (mlow_hi == mhigh_hi && mlow_lo < mhigh_lo))) gcc_assert (mlow_hi < mhigh_hi
abort (); || (mlow_hi == mhigh_hi && mlow_lo < mhigh_lo));
/* If precision == N, then mlow, mhigh exceed 2^N /* If precision == N, then mlow, mhigh exceed 2^N
(but they do not exceed 2^(N+1)). */ (but they do not exceed 2^(N+1)). */
...@@ -3156,8 +3139,7 @@ expand_mult_highpart (enum machine_mode mode, rtx op0, ...@@ -3156,8 +3139,7 @@ expand_mult_highpart (enum machine_mode mode, rtx op0,
rtx op1, tem; rtx op1, tem;
/* We can't support modes wider than HOST_BITS_PER_INT. */ /* We can't support modes wider than HOST_BITS_PER_INT. */
if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT) gcc_assert (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT);
abort ();
op1 = gen_int_mode (cnst1, wider_mode); op1 = gen_int_mode (cnst1, wider_mode);
cnst1 &= GET_MODE_MASK (mode); cnst1 &= GET_MODE_MASK (mode);
...@@ -3662,8 +3644,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode, ...@@ -3662,8 +3644,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
mh = choose_multiplier (d >> pre_shift, size, mh = choose_multiplier (d >> pre_shift, size,
size - pre_shift, size - pre_shift,
&ml, &post_shift, &dummy); &ml, &post_shift, &dummy);
if (mh) gcc_assert (!mh);
abort ();
} }
else else
pre_shift = 0; pre_shift = 0;
...@@ -3939,8 +3920,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode, ...@@ -3939,8 +3920,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
mh = choose_multiplier (d, size, size - 1, mh = choose_multiplier (d, size, size - 1,
&ml, &post_shift, &lgup); &ml, &post_shift, &lgup);
if (mh) gcc_assert (!mh);
abort ();
if (post_shift < BITS_PER_WORD if (post_shift < BITS_PER_WORD
&& size - 1 < BITS_PER_WORD) && size - 1 < BITS_PER_WORD)
...@@ -4398,7 +4378,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode, ...@@ -4398,7 +4378,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
return gen_lowpart (mode, rem_flag ? remainder : quotient); return gen_lowpart (mode, rem_flag ? remainder : quotient);
default: default:
abort (); gcc_unreachable ();
} }
if (quotient == 0) if (quotient == 0)
...@@ -4899,20 +4879,23 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1, ...@@ -4899,20 +4879,23 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
= compare_from_rtx (op0, op1, code, unsignedp, mode, NULL_RTX); = compare_from_rtx (op0, op1, code, unsignedp, mode, NULL_RTX);
if (CONSTANT_P (comparison)) if (CONSTANT_P (comparison))
{ {
if (GET_CODE (comparison) == CONST_INT) switch (GET_CODE (comparison))
{ {
case CONST_INT:
if (comparison == const0_rtx) if (comparison == const0_rtx)
return const0_rtx; return const0_rtx;
} break;
#ifdef FLOAT_STORE_FLAG_VALUE #ifdef FLOAT_STORE_FLAG_VALUE
else if (GET_CODE (comparison) == CONST_DOUBLE) case CONST_DOUBLE:
{
if (comparison == CONST0_RTX (GET_MODE (comparison))) if (comparison == CONST0_RTX (GET_MODE (comparison)))
return const0_rtx; return const0_rtx;
} break;
#endif #endif
else default:
abort (); gcc_unreachable ();
}
if (normalizep == 1) if (normalizep == 1)
return const1_rtx; return const1_rtx;
if (normalizep == -1) if (normalizep == -1)
...@@ -4987,14 +4970,14 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1, ...@@ -4987,14 +4970,14 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
op0 = expand_shift (RSHIFT_EXPR, compare_mode, op0, op0 = expand_shift (RSHIFT_EXPR, compare_mode, op0,
size_int (GET_MODE_BITSIZE (compare_mode) - 1), size_int (GET_MODE_BITSIZE (compare_mode) - 1),
subtarget, normalizep == 1); subtarget, normalizep == 1);
else if (STORE_FLAG_VALUE & 1) else
{ {
gcc_assert (STORE_FLAG_VALUE & 1);
op0 = expand_and (compare_mode, op0, const1_rtx, subtarget); op0 = expand_and (compare_mode, op0, const1_rtx, subtarget);
if (normalizep == -1) if (normalizep == -1)
op0 = expand_unop (compare_mode, neg_optab, op0, op0, 0); op0 = expand_unop (compare_mode, neg_optab, op0, op0, 0);
} }
else
abort ();
/* If we were converting to a smaller mode, do the /* If we were converting to a smaller mode, do the
conversion now. */ conversion now. */
...@@ -5262,19 +5245,17 @@ do_cmp_and_jump (rtx arg1, rtx arg2, enum rtx_code op, enum machine_mode mode, ...@@ -5262,19 +5245,17 @@ do_cmp_and_jump (rtx arg1, rtx arg2, enum rtx_code op, enum machine_mode mode,
/* do_jump_by_parts_equality_rtx compares with zero. Luckily /* do_jump_by_parts_equality_rtx compares with zero. Luckily
that's the only equality operations we do */ that's the only equality operations we do */
case EQ: case EQ:
if (arg2 != const0_rtx || mode != GET_MODE(arg1)) gcc_assert (arg2 == const0_rtx && mode == GET_MODE(arg1));
abort ();
do_jump_by_parts_equality_rtx (arg1, label2, label); do_jump_by_parts_equality_rtx (arg1, label2, label);
break; break;
case NE: case NE:
if (arg2 != const0_rtx || mode != GET_MODE(arg1)) gcc_assert (arg2 == const0_rtx && mode == GET_MODE(arg1));
abort ();
do_jump_by_parts_equality_rtx (arg1, label, label2); do_jump_by_parts_equality_rtx (arg1, label, label2);
break; break;
default: default:
abort (); gcc_unreachable ();
} }
emit_label (label2); emit_label (label2);
......
...@@ -335,8 +335,7 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -335,8 +335,7 @@ convert_move (rtx to, rtx from, int unsignedp)
: (unsignedp ? ZERO_EXTEND : SIGN_EXTEND)); : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
if (to_real != from_real) gcc_assert (to_real == from_real);
abort ();
/* If the source and destination are already the same, then there's /* If the source and destination are already the same, then there's
nothing to do. */ nothing to do. */
...@@ -353,8 +352,7 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -353,8 +352,7 @@ convert_move (rtx to, rtx from, int unsignedp)
&& SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp) && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
from = gen_lowpart (to_mode, from), from_mode = to_mode; from = gen_lowpart (to_mode, from), from_mode = to_mode;
if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to)) gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
abort ();
if (to_mode == from_mode if (to_mode == from_mode
|| (from_mode == VOIDmode && CONSTANT_P (from))) || (from_mode == VOIDmode && CONSTANT_P (from)))
...@@ -365,8 +363,7 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -365,8 +363,7 @@ convert_move (rtx to, rtx from, int unsignedp)
if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode)) if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
{ {
if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode)) gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
abort ();
if (VECTOR_MODE_P (to_mode)) if (VECTOR_MODE_P (to_mode))
from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0); from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
...@@ -389,12 +386,13 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -389,12 +386,13 @@ convert_move (rtx to, rtx from, int unsignedp)
rtx value, insns; rtx value, insns;
convert_optab tab; convert_optab tab;
gcc_assert (GET_MODE_PRECISION (from_mode)
!= GET_MODE_PRECISION (to_mode));
if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)) if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
tab = sext_optab; tab = sext_optab;
else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
tab = trunc_optab;
else else
abort (); tab = trunc_optab;
/* Try converting directly if the insn is supported. */ /* Try converting directly if the insn is supported. */
...@@ -409,9 +407,8 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -409,9 +407,8 @@ convert_move (rtx to, rtx from, int unsignedp)
/* Otherwise use a libcall. */ /* Otherwise use a libcall. */
libcall = tab->handlers[to_mode][from_mode].libfunc; libcall = tab->handlers[to_mode][from_mode].libfunc;
if (!libcall) /* Is this conversion implemented yet? */
/* This conversion is not implemented yet. */ gcc_assert (libcall);
abort ();
start_sequence (); start_sequence ();
value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode, value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
...@@ -433,9 +430,8 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -433,9 +430,8 @@ convert_move (rtx to, rtx from, int unsignedp)
enum machine_mode full_mode enum machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT); = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
if (trunc_optab->handlers[to_mode][full_mode].insn_code gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
== CODE_FOR_nothing) != CODE_FOR_nothing);
abort ();
if (full_mode != from_mode) if (full_mode != from_mode)
from = convert_to_mode (full_mode, from, unsignedp); from = convert_to_mode (full_mode, from, unsignedp);
...@@ -448,9 +444,8 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -448,9 +444,8 @@ convert_move (rtx to, rtx from, int unsignedp)
enum machine_mode full_mode enum machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT); = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
if (sext_optab->handlers[full_mode][from_mode].insn_code gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
== CODE_FOR_nothing) != CODE_FOR_nothing);
abort ();
emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code, emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
to, from, UNKNOWN); to, from, UNKNOWN);
...@@ -557,8 +552,7 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -557,8 +552,7 @@ convert_move (rtx to, rtx from, int unsignedp)
int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i); int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
rtx subword = operand_subword (to, index, 1, to_mode); rtx subword = operand_subword (to, index, 1, to_mode);
if (subword == 0) gcc_assert (subword);
abort ();
if (fill_value != subword) if (fill_value != subword)
emit_move_insn (subword, fill_value); emit_move_insn (subword, fill_value);
...@@ -683,7 +677,7 @@ convert_move (rtx to, rtx from, int unsignedp) ...@@ -683,7 +677,7 @@ convert_move (rtx to, rtx from, int unsignedp)
} }
/* Mode combination is not recognized. */ /* Mode combination is not recognized. */
abort (); gcc_unreachable ();
} }
/* Return an rtx for a value that would result /* Return an rtx for a value that would result
...@@ -797,8 +791,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns ...@@ -797,8 +791,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
subreg operation. */ subreg operation. */
if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode) if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
{ {
if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode)) gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
abort ();
return simplify_gen_subreg (mode, x, oldmode, 0); return simplify_gen_subreg (mode, x, oldmode, 0);
} }
...@@ -964,15 +957,13 @@ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len, ...@@ -964,15 +957,13 @@ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
} }
/* The code above should have handled everything. */ /* The code above should have handled everything. */
if (data.len > 0) gcc_assert (!data.len);
abort ();
if (endp) if (endp)
{ {
rtx to1; rtx to1;
if (data.reverse) gcc_assert (!data.reverse);
abort ();
if (data.autinc_to) if (data.autinc_to)
{ {
if (endp == 2) if (endp == 2)
...@@ -1045,8 +1036,7 @@ move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align, ...@@ -1045,8 +1036,7 @@ move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
max_size = GET_MODE_SIZE (mode); max_size = GET_MODE_SIZE (mode);
} }
if (l) gcc_assert (!l);
abort ();
return n_insns; return n_insns;
} }
...@@ -1095,7 +1085,7 @@ move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode, ...@@ -1095,7 +1085,7 @@ move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
#ifdef PUSH_ROUNDING #ifdef PUSH_ROUNDING
emit_single_push_insn (mode, from1, NULL); emit_single_push_insn (mode, from1, NULL);
#else #else
abort (); gcc_unreachable ();
#endif #endif
} }
...@@ -1149,17 +1139,14 @@ emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method) ...@@ -1149,17 +1139,14 @@ emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
break; break;
default: default:
abort (); gcc_unreachable ();
} }
align = MIN (MEM_ALIGN (x), MEM_ALIGN (y)); align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
if (!MEM_P (x)) gcc_assert (MEM_P (x));
abort (); gcc_assert (MEM_P (y));
if (!MEM_P (y)) gcc_assert (size);
abort ();
if (size == 0)
abort ();
/* Make sure we've got BLKmode addresses; store_one_arg can decide that /* Make sure we've got BLKmode addresses; store_one_arg can decide that
block copy is more efficient for other large modes, e.g. DCmode. */ block copy is more efficient for other large modes, e.g. DCmode. */
...@@ -1530,8 +1517,7 @@ move_block_from_reg (int regno, rtx x, int nregs) ...@@ -1530,8 +1517,7 @@ move_block_from_reg (int regno, rtx x, int nregs)
{ {
rtx tem = operand_subword (x, i, 1, BLKmode); rtx tem = operand_subword (x, i, 1, BLKmode);
if (tem == 0) gcc_assert (tem);
abort ();
emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i)); emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
} }
...@@ -1549,8 +1535,7 @@ gen_group_rtx (rtx orig) ...@@ -1549,8 +1535,7 @@ gen_group_rtx (rtx orig)
int i, length; int i, length;
rtx *tmps; rtx *tmps;
if (GET_CODE (orig) != PARALLEL) gcc_assert (GET_CODE (orig) == PARALLEL);
abort ();
length = XVECLEN (orig, 0); length = XVECLEN (orig, 0);
tmps = alloca (sizeof (rtx) * length); tmps = alloca (sizeof (rtx) * length);
...@@ -1583,8 +1568,7 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize) ...@@ -1583,8 +1568,7 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
rtx *tmps, src; rtx *tmps, src;
int start, i; int start, i;
if (GET_CODE (dst) != PARALLEL) gcc_assert (GET_CODE (dst) == PARALLEL);
abort ();
/* Check for a NULL entry, used to indicate that the parameter goes /* Check for a NULL entry, used to indicate that the parameter goes
both on the stack and in registers. */ both on the stack and in registers. */
...@@ -1618,8 +1602,7 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize) ...@@ -1618,8 +1602,7 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
) )
shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
bytelen = ssize - bytepos; bytelen = ssize - bytepos;
if (bytelen <= 0) gcc_assert (bytelen > 0);
abort ();
} }
/* If we won't be loading directly from memory, protect the real source /* If we won't be loading directly from memory, protect the real source
...@@ -1668,14 +1651,15 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize) ...@@ -1668,14 +1651,15 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
(bytepos % slen0) * BITS_PER_UNIT, (bytepos % slen0) * BITS_PER_UNIT,
1, NULL_RTX, mode, mode); 1, NULL_RTX, mode, mode);
} }
else if (bytepos == 0) else
{ {
rtx mem = assign_stack_temp (GET_MODE (src), slen, 0); rtx mem;
gcc_assert (!bytepos);
mem = assign_stack_temp (GET_MODE (src), slen, 0);
emit_move_insn (mem, src); emit_move_insn (mem, src);
tmps[i] = adjust_address (mem, mode, 0); tmps[i] = adjust_address (mem, mode, 0);
} }
else
abort ();
} }
/* FIXME: A SIMD parallel will eventually lead to a subreg of a /* FIXME: A SIMD parallel will eventually lead to a subreg of a
SIMD register, which is currently broken. While we get GCC SIMD register, which is currently broken. While we get GCC
...@@ -1719,10 +1703,9 @@ emit_group_move (rtx dst, rtx src) ...@@ -1719,10 +1703,9 @@ emit_group_move (rtx dst, rtx src)
{ {
int i; int i;
if (GET_CODE (src) != PARALLEL gcc_assert (GET_CODE (src) == PARALLEL
|| GET_CODE (dst) != PARALLEL && GET_CODE (dst) == PARALLEL
|| XVECLEN (src, 0) != XVECLEN (dst, 0)) && XVECLEN (src, 0) == XVECLEN (dst, 0));
abort ();
/* Skip first entry if NULL. */ /* Skip first entry if NULL. */
for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++) for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
...@@ -1741,8 +1724,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) ...@@ -1741,8 +1724,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
rtx *tmps, dst; rtx *tmps, dst;
int start, i; int start, i;
if (GET_CODE (src) != PARALLEL) gcc_assert (GET_CODE (src) == PARALLEL);
abort ();
/* Check for a NULL entry, used to indicate that the parameter goes /* Check for a NULL entry, used to indicate that the parameter goes
both on the stack and in registers. */ both on the stack and in registers. */
...@@ -1829,8 +1811,9 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) ...@@ -1829,8 +1811,9 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))); bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
dest = XEXP (dst, 1); dest = XEXP (dst, 1);
} }
else if (bytepos == 0 && XVECLEN (src, 0)) else
{ {
gcc_assert (bytepos == 0 && XVECLEN (src, 0));
dest = assign_stack_temp (GET_MODE (dest), dest = assign_stack_temp (GET_MODE (dest),
GET_MODE_SIZE (GET_MODE (dest)), 0); GET_MODE_SIZE (GET_MODE (dest)), 0);
emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos), emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
...@@ -1838,8 +1821,6 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) ...@@ -1838,8 +1821,6 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
dst = dest; dst = dest;
break; break;
} }
else
abort ();
} }
/* Optimize the access just a bit. */ /* Optimize the access just a bit. */
...@@ -1947,10 +1928,8 @@ copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type) ...@@ -1947,10 +1928,8 @@ copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
void void
use_reg (rtx *call_fusage, rtx reg) use_reg (rtx *call_fusage, rtx reg)
{ {
if (!REG_P (reg) gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
|| REGNO (reg) >= FIRST_PSEUDO_REGISTER)
abort ();
*call_fusage *call_fusage
= gen_rtx_EXPR_LIST (VOIDmode, = gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode, reg), *call_fusage); gen_rtx_USE (VOIDmode, reg), *call_fusage);
...@@ -1964,8 +1943,7 @@ use_regs (rtx *call_fusage, int regno, int nregs) ...@@ -1964,8 +1943,7 @@ use_regs (rtx *call_fusage, int regno, int nregs)
{ {
int i; int i;
if (regno + nregs > FIRST_PSEUDO_REGISTER) gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
abort ();
for (i = 0; i < nregs; i++) for (i = 0; i < nregs; i++)
use_reg (call_fusage, regno_reg_rtx[regno + i]); use_reg (call_fusage, regno_reg_rtx[regno + i]);
...@@ -2081,8 +2059,7 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len, ...@@ -2081,8 +2059,7 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len,
} }
/* The code above should have handled everything. */ /* The code above should have handled everything. */
if (l != 0) gcc_assert (!l);
abort ();
} }
return 1; return 1;
...@@ -2105,13 +2082,11 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len, ...@@ -2105,13 +2082,11 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
if (len == 0) if (len == 0)
{ {
if (endp == 2) gcc_assert (endp != 2);
abort ();
return to; return to;
} }
if (! STORE_BY_PIECES_P (len, align)) gcc_assert (STORE_BY_PIECES_P (len, align));
abort ();
data.constfun = constfun; data.constfun = constfun;
data.constfundata = constfundata; data.constfundata = constfundata;
data.len = len; data.len = len;
...@@ -2121,8 +2096,7 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len, ...@@ -2121,8 +2096,7 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
{ {
rtx to1; rtx to1;
if (data.reverse) gcc_assert (!data.reverse);
abort ();
if (data.autinc_to) if (data.autinc_to)
{ {
if (endp == 2) if (endp == 2)
...@@ -2271,8 +2245,7 @@ store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED, ...@@ -2271,8 +2245,7 @@ store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
} }
/* The code above should have handled everything. */ /* The code above should have handled everything. */
if (data->len != 0) gcc_assert (!data->len);
abort ();
} }
/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
...@@ -2508,8 +2481,8 @@ emit_move_insn (rtx x, rtx y) ...@@ -2508,8 +2481,8 @@ emit_move_insn (rtx x, rtx y)
rtx y_cst = NULL_RTX; rtx y_cst = NULL_RTX;
rtx last_insn, set; rtx last_insn, set;
if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode)) gcc_assert (mode != BLKmode
abort (); && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
if (CONSTANT_P (y)) if (CONSTANT_P (y))
{ {
...@@ -2547,8 +2520,7 @@ emit_move_insn (rtx x, rtx y) ...@@ -2547,8 +2520,7 @@ emit_move_insn (rtx x, rtx y)
&& CONSTANT_ADDRESS_P (XEXP (y, 0))))) && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
y = validize_mem (y); y = validize_mem (y);
if (mode == BLKmode) gcc_assert (mode != BLKmode);
abort ();
last_insn = emit_move_insn_1 (x, y); last_insn = emit_move_insn_1 (x, y);
...@@ -2572,8 +2544,7 @@ emit_move_insn_1 (rtx x, rtx y) ...@@ -2572,8 +2544,7 @@ emit_move_insn_1 (rtx x, rtx y)
enum machine_mode submode; enum machine_mode submode;
enum mode_class class = GET_MODE_CLASS (mode); enum mode_class class = GET_MODE_CLASS (mode);
if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE) gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
abort ();
if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
return return
...@@ -2753,8 +2724,7 @@ emit_move_insn_1 (rtx x, rtx y) ...@@ -2753,8 +2724,7 @@ emit_move_insn_1 (rtx x, rtx y)
if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode)) if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
break; break;
if (tmode == VOIDmode) gcc_assert (tmode != VOIDmode);
abort ();
/* Get X and Y in TMODE. We can't use gen_lowpart here because it /* Get X and Y in TMODE. We can't use gen_lowpart here because it
may call change_address which is not appropriate if we were may call change_address which is not appropriate if we were
...@@ -2803,13 +2773,15 @@ emit_move_insn_1 (rtx x, rtx y) ...@@ -2803,13 +2773,15 @@ emit_move_insn_1 (rtx x, rtx y)
/* This will handle any multi-word or full-word mode that lacks a move_insn /* This will handle any multi-word or full-word mode that lacks a move_insn
pattern. However, you will get better code if you define such patterns, pattern. However, you will get better code if you define such patterns,
even if they must turn into multiple assembler instructions. */ even if they must turn into multiple assembler instructions. */
else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD) else
{ {
rtx last_insn = 0; rtx last_insn = 0;
rtx seq, inner; rtx seq, inner;
int need_clobber; int need_clobber;
int i; int i;
gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
#ifdef PUSH_ROUNDING #ifdef PUSH_ROUNDING
/* If X is a push on the stack, do the push now and replace /* If X is a push on the stack, do the push now and replace
...@@ -2883,8 +2855,7 @@ emit_move_insn_1 (rtx x, rtx y) ...@@ -2883,8 +2855,7 @@ emit_move_insn_1 (rtx x, rtx y)
else if (ypart == 0) else if (ypart == 0)
ypart = operand_subword_force (y, i, mode); ypart = operand_subword_force (y, i, mode);
if (xpart == 0 || ypart == 0) gcc_assert (xpart && ypart);
abort ();
need_clobber |= (GET_CODE (xpart) == SUBREG); need_clobber |= (GET_CODE (xpart) == SUBREG);
...@@ -2907,8 +2878,6 @@ emit_move_insn_1 (rtx x, rtx y) ...@@ -2907,8 +2878,6 @@ emit_move_insn_1 (rtx x, rtx y)
return last_insn; return last_insn;
} }
else
abort ();
} }
/* If Y is representable exactly in a narrower mode, and the target can /* If Y is representable exactly in a narrower mode, and the target can
...@@ -3192,8 +3161,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size, ...@@ -3192,8 +3161,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
else else
offset = used % (PARM_BOUNDARY / BITS_PER_UNIT); offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
if (size == 0) gcc_assert (size);
abort ();
used -= offset; used -= offset;
...@@ -3490,8 +3458,7 @@ expand_assignment (tree to, tree from, int want_value) ...@@ -3490,8 +3458,7 @@ expand_assignment (tree to, tree from, int want_value)
{ {
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
if (!MEM_P (to_rtx)) gcc_assert (MEM_P (to_rtx));
abort ();
#ifdef POINTERS_EXTEND_UNSIGNED #ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode) if (GET_MODE (offset_rtx) != Pmode)
...@@ -3821,8 +3788,7 @@ store_expr (tree exp, rtx target, int want_value) ...@@ -3821,8 +3788,7 @@ store_expr (tree exp, rtx target, int want_value)
/* C++ can generate ?: expressions with a throw expression in one /* C++ can generate ?: expressions with a throw expression in one
branch and an rvalue in the other. Here, we resolve attempts to branch and an rvalue in the other. Here, we resolve attempts to
store the throw expression's nonexistent result. */ store the throw expression's nonexistent result. */
if (want_value) gcc_assert (!want_value);
abort ();
expand_expr (exp, const0_rtx, VOIDmode, 0); expand_expr (exp, const0_rtx, VOIDmode, 0);
return NULL_RTX; return NULL_RTX;
} }
...@@ -4304,7 +4270,7 @@ count_type_elements (tree type) ...@@ -4304,7 +4270,7 @@ count_type_elements (tree type)
case FUNCTION_TYPE: case FUNCTION_TYPE:
case LANG_TYPE: case LANG_TYPE:
default: default:
abort (); gcc_unreachable ();
} }
} }
...@@ -4397,721 +4363,731 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) ...@@ -4397,721 +4363,731 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
HOST_WIDE_INT exp_size = int_size_in_bytes (type); HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif #endif
if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE switch (TREE_CODE (type))
|| TREE_CODE (type) == QUAL_UNION_TYPE)
{ {
tree elt; case RECORD_TYPE:
case UNION_TYPE:
/* If size is zero or the target is already cleared, do nothing. */ case QUAL_UNION_TYPE:
if (size == 0 || cleared) {
cleared = 1; tree elt;
/* We either clear the aggregate or indicate the value is dead. */
else if ((TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
&& ! CONSTRUCTOR_ELTS (exp))
/* If the constructor is empty, clear the union. */
{
clear_storage (target, expr_size (exp));
cleared = 1;
}
/* If we are building a static constructor into a register,
set the initial value as zero so we can fold the value into
a constant. But if more than one register is involved,
this probably loses. */
else if (REG_P (target) && TREE_STATIC (exp)
&& GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
{
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
cleared = 1;
}
/* If the constructor has fewer fields than the structure /* If size is zero or the target is already cleared, do nothing. */
or if we are initializing the structure to mostly zeros, if (size == 0 || cleared)
clear the whole structure first. Don't do this if TARGET is a
register whose mode size isn't equal to SIZE since clear_storage
can't handle this case. */
else if (size > 0
&& ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
|| mostly_zeros_p (exp))
&& (!REG_P (target)
|| ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
== size)))
{
clear_storage (target, GEN_INT (size));
cleared = 1; cleared = 1;
} /* We either clear the aggregate or indicate the value is dead. */
else if ((TREE_CODE (type) == UNION_TYPE
if (! cleared) || TREE_CODE (type) == QUAL_UNION_TYPE)
emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); && ! CONSTRUCTOR_ELTS (exp))
/* If the constructor is empty, clear the union. */
/* Store each element of the constructor into {
the corresponding field of TARGET. */ clear_storage (target, expr_size (exp));
cleared = 1;
for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) }
{
tree field = TREE_PURPOSE (elt);
tree value = TREE_VALUE (elt);
enum machine_mode mode;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos = 0;
tree offset;
rtx to_rtx = target;
/* Just ignore missing fields.
We cleared the whole structure, above,
if any fields are missing. */
if (field == 0)
continue;
if (cleared && initializer_zerop (value))
continue;
if (host_integerp (DECL_SIZE (field), 1))
bitsize = tree_low_cst (DECL_SIZE (field), 1);
else
bitsize = -1;
mode = DECL_MODE (field);
if (DECL_BIT_FIELD (field))
mode = VOIDmode;
offset = DECL_FIELD_OFFSET (field); /* If we are building a static constructor into a register,
if (host_integerp (offset, 0) set the initial value as zero so we can fold the value into
&& host_integerp (bit_position (field), 0)) a constant. But if more than one register is involved,
{ this probably loses. */
bitpos = int_bit_position (field); else if (REG_P (target) && TREE_STATIC (exp)
offset = 0; && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
} {
else emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); cleared = 1;
}
if (offset) /* If the constructor has fewer fields than the structure or
{ if we are initializing the structure to mostly zeros, clear
rtx offset_rtx; the whole structure first. Don't do this if TARGET is a
register whose mode size isn't equal to SIZE since
clear_storage can't handle this case. */
else if (size > 0
&& ((list_length (CONSTRUCTOR_ELTS (exp))
!= fields_length (type))
|| mostly_zeros_p (exp))
&& (!REG_P (target)
|| ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
== size)))
{
clear_storage (target, GEN_INT (size));
cleared = 1;
}
offset if (! cleared)
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset, emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
make_tree (TREE_TYPE (exp),
target));
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); /* Store each element of the constructor into the
if (!MEM_P (to_rtx)) corresponding field of TARGET. */
abort ();
for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
{
tree field = TREE_PURPOSE (elt);
tree value = TREE_VALUE (elt);
enum machine_mode mode;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos = 0;
tree offset;
rtx to_rtx = target;
/* Just ignore missing fields. We cleared the whole
structure, above, if any fields are missing. */
if (field == 0)
continue;
if (cleared && initializer_zerop (value))
continue;
if (host_integerp (DECL_SIZE (field), 1))
bitsize = tree_low_cst (DECL_SIZE (field), 1);
else
bitsize = -1;
mode = DECL_MODE (field);
if (DECL_BIT_FIELD (field))
mode = VOIDmode;
offset = DECL_FIELD_OFFSET (field);
if (host_integerp (offset, 0)
&& host_integerp (bit_position (field), 0))
{
bitpos = int_bit_position (field);
offset = 0;
}
else
bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
if (offset)
{
rtx offset_rtx;
offset
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
make_tree (TREE_TYPE (exp),
target));
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
gcc_assert (MEM_P (to_rtx));
#ifdef POINTERS_EXTEND_UNSIGNED #ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode) if (GET_MODE (offset_rtx) != Pmode)
offset_rtx = convert_to_mode (Pmode, offset_rtx, 0); offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else #else
if (GET_MODE (offset_rtx) != ptr_mode) if (GET_MODE (offset_rtx) != ptr_mode)
offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif #endif
to_rtx = offset_address (to_rtx, offset_rtx, to_rtx = offset_address (to_rtx, offset_rtx,
highest_pow2_factor (offset)); highest_pow2_factor (offset));
} }
#ifdef WORD_REGISTER_OPERATIONS #ifdef WORD_REGISTER_OPERATIONS
/* If this initializes a field that is smaller than a word, at the /* If this initializes a field that is smaller than a
start of a word, try to widen it to a full word. word, at the start of a word, try to widen it to a full
This special case allows us to output C++ member function word. This special case allows us to output C++ member
initializations in a form that the optimizers can understand. */ function initializations in a form that the optimizers
if (REG_P (target) can understand. */
&& bitsize < BITS_PER_WORD if (REG_P (target)
&& bitpos % BITS_PER_WORD == 0 && bitsize < BITS_PER_WORD
&& GET_MODE_CLASS (mode) == MODE_INT && bitpos % BITS_PER_WORD == 0
&& TREE_CODE (value) == INTEGER_CST && GET_MODE_CLASS (mode) == MODE_INT
&& exp_size >= 0 && TREE_CODE (value) == INTEGER_CST
&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) && exp_size >= 0
{ && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
tree type = TREE_TYPE (value); {
tree type = TREE_TYPE (value);
if (TYPE_PRECISION (type) < BITS_PER_WORD)
{ if (TYPE_PRECISION (type) < BITS_PER_WORD)
type = lang_hooks.types.type_for_size {
(BITS_PER_WORD, TYPE_UNSIGNED (type)); type = lang_hooks.types.type_for_size
value = convert (type, value); (BITS_PER_WORD, TYPE_UNSIGNED (type));
} value = convert (type, value);
}
if (BYTES_BIG_ENDIAN)
value if (BYTES_BIG_ENDIAN)
= fold (build2 (LSHIFT_EXPR, type, value, value
build_int_cst (NULL_TREE, = fold (build2 (LSHIFT_EXPR, type, value,
BITS_PER_WORD - bitsize))); build_int_cst (NULL_TREE,
bitsize = BITS_PER_WORD; BITS_PER_WORD - bitsize)));
mode = word_mode; bitsize = BITS_PER_WORD;
} mode = word_mode;
}
#endif #endif
if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx) if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
&& DECL_NONADDRESSABLE_P (field)) && DECL_NONADDRESSABLE_P (field))
{ {
to_rtx = copy_rtx (to_rtx); to_rtx = copy_rtx (to_rtx);
MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
} }
store_constructor_field (to_rtx, bitsize, bitpos, mode, store_constructor_field (to_rtx, bitsize, bitpos, mode,
value, type, cleared, value, type, cleared,
get_alias_set (TREE_TYPE (field))); get_alias_set (TREE_TYPE (field)));
} }
} break;
}
else if (TREE_CODE (type) == ARRAY_TYPE) case ARRAY_TYPE:
{ {
tree elt; tree elt;
int i; int i;
int need_to_clear; int need_to_clear;
tree domain; tree domain;
tree elttype = TREE_TYPE (type); tree elttype = TREE_TYPE (type);
int const_bounds_p; int const_bounds_p;
HOST_WIDE_INT minelt = 0; HOST_WIDE_INT minelt = 0;
HOST_WIDE_INT maxelt = 0; HOST_WIDE_INT maxelt = 0;
domain = TYPE_DOMAIN (type); domain = TYPE_DOMAIN (type);
const_bounds_p = (TYPE_MIN_VALUE (domain) const_bounds_p = (TYPE_MIN_VALUE (domain)
&& TYPE_MAX_VALUE (domain) && TYPE_MAX_VALUE (domain)
&& host_integerp (TYPE_MIN_VALUE (domain), 0) && host_integerp (TYPE_MIN_VALUE (domain), 0)
&& host_integerp (TYPE_MAX_VALUE (domain), 0)); && host_integerp (TYPE_MAX_VALUE (domain), 0));
/* If we have constant bounds for the range of the type, get them. */ /* If we have constant bounds for the range of the type, get them. */
if (const_bounds_p) if (const_bounds_p)
{ {
minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
} }
/* If the constructor has fewer elements than the array,
clear the whole array first. Similarly if this is
static constructor of a non-BLKmode object. */
if (cleared)
need_to_clear = 0;
else if (REG_P (target) && TREE_STATIC (exp))
need_to_clear = 1;
else
{
HOST_WIDE_INT count = 0, zero_count = 0;
need_to_clear = ! const_bounds_p;
/* This loop is a more accurate version of the loop in
mostly_zeros_p (it handles RANGE_EXPR in an index).
It is also needed to check for missing elements. */
for (elt = CONSTRUCTOR_ELTS (exp);
elt != NULL_TREE && ! need_to_clear;
elt = TREE_CHAIN (elt))
{
tree index = TREE_PURPOSE (elt);
HOST_WIDE_INT this_node_count;
if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
{
tree lo_index = TREE_OPERAND (index, 0);
tree hi_index = TREE_OPERAND (index, 1);
if (! host_integerp (lo_index, 1)
|| ! host_integerp (hi_index, 1))
{
need_to_clear = 1;
break;
}
this_node_count = (tree_low_cst (hi_index, 1)
- tree_low_cst (lo_index, 1) + 1);
}
else
this_node_count = 1;
count += this_node_count;
if (mostly_zeros_p (TREE_VALUE (elt)))
zero_count += this_node_count;
}
/* Clear the entire array first if there are any missing elements,
or if the incidence of zero elements is >= 75%. */
if (! need_to_clear
&& (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
need_to_clear = 1;
}
if (need_to_clear && size > 0)
{
if (REG_P (target))
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
else
clear_storage (target, GEN_INT (size));
cleared = 1;
}
if (!cleared && REG_P (target))
/* Inform later passes that the old value is dead. */
emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
the corresponding element of TARGET, determined
by counting the elements. */
for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
elt;
elt = TREE_CHAIN (elt), i++)
{
enum machine_mode mode;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
int unsignedp;
tree value = TREE_VALUE (elt);
tree index = TREE_PURPOSE (elt);
rtx xtarget = target;
if (cleared && initializer_zerop (value))
continue;
unsignedp = TYPE_UNSIGNED (elttype);
mode = TYPE_MODE (elttype);
if (mode == BLKmode)
bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
? tree_low_cst (TYPE_SIZE (elttype), 1)
: -1);
else
bitsize = GET_MODE_BITSIZE (mode);
if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
{
tree lo_index = TREE_OPERAND (index, 0);
tree hi_index = TREE_OPERAND (index, 1);
rtx index_r, pos_rtx;
HOST_WIDE_INT lo, hi, count;
tree position;
/* If the range is constant and "small", unroll the loop. */
if (const_bounds_p
&& host_integerp (lo_index, 0)
&& host_integerp (hi_index, 0)
&& (lo = tree_low_cst (lo_index, 0),
hi = tree_low_cst (hi_index, 0),
count = hi - lo + 1,
(!MEM_P (target)
|| count <= 2
|| (host_integerp (TYPE_SIZE (elttype), 1)
&& (tree_low_cst (TYPE_SIZE (elttype), 1) * count
<= 40 * 8)))))
{
lo -= minelt; hi -= minelt;
for (; lo <= hi; lo++)
{
bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
if (MEM_P (target)
&& !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))
{
target = copy_rtx (target);
MEM_KEEP_ALIAS_SET_P (target) = 1;
}
store_constructor_field
(target, bitsize, bitpos, mode, value, type, cleared,
get_alias_set (elttype));
}
}
else
{
rtx loop_start = gen_label_rtx ();
rtx loop_end = gen_label_rtx ();
tree exit_cond;
expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
unsignedp = TYPE_UNSIGNED (domain);
index = build_decl (VAR_DECL, NULL_TREE, domain);
index_r
= gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
&unsignedp, 0));
SET_DECL_RTL (index, index_r);
store_expr (lo_index, index_r, 0);
/* Build the head of the loop. */
do_pending_stack_adjust ();
emit_label (loop_start);
/* Assign value to element index. */
position
= convert (ssizetype,
fold (build2 (MINUS_EXPR, TREE_TYPE (index),
index, TYPE_MIN_VALUE (domain))));
position = size_binop (MULT_EXPR, position,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
xtarget = offset_address (target, pos_rtx,
highest_pow2_factor (position));
xtarget = adjust_address (xtarget, mode, 0);
if (TREE_CODE (value) == CONSTRUCTOR)
store_constructor (value, xtarget, cleared,
bitsize / BITS_PER_UNIT);
else
store_expr (value, xtarget, 0);
/* Generate a conditional jump to exit the loop. */
exit_cond = build2 (LT_EXPR, integer_type_node,
index, hi_index);
jumpif (exit_cond, loop_end);
/* Update the loop counter, and jump to the head of
the loop. */
expand_assignment (index,
build2 (PLUS_EXPR, TREE_TYPE (index),
index, integer_one_node), 0);
emit_jump (loop_start);
/* Build the end of the loop. */
emit_label (loop_end);
}
}
else if ((index != 0 && ! host_integerp (index, 0))
|| ! host_integerp (TYPE_SIZE (elttype), 1))
{
tree position;
if (index == 0)
index = ssize_int (1);
if (minelt)
index = fold_convert (ssizetype,
fold (build2 (MINUS_EXPR,
TREE_TYPE (index),
index,
TYPE_MIN_VALUE (domain))));
position = size_binop (MULT_EXPR, index,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
xtarget = offset_address (target,
expand_expr (position, 0, VOIDmode, 0),
highest_pow2_factor (position));
xtarget = adjust_address (xtarget, mode, 0);
store_expr (value, xtarget, 0);
}
else
{
if (index != 0)
bitpos = ((tree_low_cst (index, 0) - minelt)
* tree_low_cst (TYPE_SIZE (elttype), 1));
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))
{
target = copy_rtx (target);
MEM_KEEP_ALIAS_SET_P (target) = 1;
}
store_constructor_field (target, bitsize, bitpos, mode, value,
type, cleared, get_alias_set (elttype));
}
}
}
else if (TREE_CODE (type) == VECTOR_TYPE)
{
tree elt;
int i;
int need_to_clear;
int icode = 0;
tree elttype = TREE_TYPE (type);
int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
enum machine_mode eltmode = TYPE_MODE (elttype);
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
rtx *vector = NULL;
unsigned n_elts;
if (eltmode == BLKmode)
abort ();
n_elts = TYPE_VECTOR_SUBPARTS (type);
if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
{
enum machine_mode mode = GET_MODE (target);
icode = (int) vec_init_optab->handlers[mode].insn_code;
if (icode != CODE_FOR_nothing)
{
unsigned int i;
vector = alloca (n_elts);
for (i = 0; i < n_elts; i++)
vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
}
}
/* If the constructor has fewer elements than the vector,
clear the whole array first. Similarly if this is
static constructor of a non-BLKmode object. */
if (cleared)
need_to_clear = 0;
else if (REG_P (target) && TREE_STATIC (exp))
need_to_clear = 1;
else
{
unsigned HOST_WIDE_INT count = 0, zero_count = 0;
for (elt = CONSTRUCTOR_ELTS (exp);
elt != NULL_TREE;
elt = TREE_CHAIN (elt))
{
int n_elts_here =
tree_low_cst (
int_const_binop (TRUNC_DIV_EXPR,
TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
TYPE_SIZE (elttype), 0), 1);
count += n_elts_here;
if (mostly_zeros_p (TREE_VALUE (elt)))
zero_count += n_elts_here;
}
/* Clear the entire vector first if there are any missing elements,
or if the incidence of zero elements is >= 75%. */
need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
}
if (need_to_clear && size > 0 && !vector)
{
if (REG_P (target))
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
else
clear_storage (target, GEN_INT (size));
cleared = 1;
}
if (!cleared && REG_P (target)) /* If the constructor has fewer elements than the array, clear
/* Inform later passes that the old value is dead. */ the whole array first. Similarly if this is static
emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); constructor of a non-BLKmode object. */
if (cleared)
need_to_clear = 0;
else if (REG_P (target) && TREE_STATIC (exp))
need_to_clear = 1;
else
{
HOST_WIDE_INT count = 0, zero_count = 0;
need_to_clear = ! const_bounds_p;
/* This loop is a more accurate version of the loop in
mostly_zeros_p (it handles RANGE_EXPR in an index). It
is also needed to check for missing elements. */
for (elt = CONSTRUCTOR_ELTS (exp);
elt != NULL_TREE && ! need_to_clear;
elt = TREE_CHAIN (elt))
{
tree index = TREE_PURPOSE (elt);
HOST_WIDE_INT this_node_count;
if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
{
tree lo_index = TREE_OPERAND (index, 0);
tree hi_index = TREE_OPERAND (index, 1);
if (! host_integerp (lo_index, 1)
|| ! host_integerp (hi_index, 1))
{
need_to_clear = 1;
break;
}
this_node_count = (tree_low_cst (hi_index, 1)
- tree_low_cst (lo_index, 1) + 1);
}
else
this_node_count = 1;
count += this_node_count;
if (mostly_zeros_p (TREE_VALUE (elt)))
zero_count += this_node_count;
}
/* Clear the entire array first if there are any missing
elements, or if the incidence of zero elements is >=
75%. */
if (! need_to_clear
&& (count < maxelt - minelt + 1
|| 4 * zero_count >= 3 * count))
need_to_clear = 1;
}
if (need_to_clear && size > 0)
{
if (REG_P (target))
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
else
clear_storage (target, GEN_INT (size));
cleared = 1;
}
/* Store each element of the constructor into the corresponding if (!cleared && REG_P (target))
element of TARGET, determined by counting the elements. */ /* Inform later passes that the old value is dead. */
for (elt = CONSTRUCTOR_ELTS (exp), i = 0; emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
elt;
elt = TREE_CHAIN (elt), i += bitsize / elt_size)
{
tree value = TREE_VALUE (elt);
tree index = TREE_PURPOSE (elt);
HOST_WIDE_INT eltpos;
bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1); /* Store each element of the constructor into the
if (cleared && initializer_zerop (value)) corresponding element of TARGET, determined by counting the
continue; elements. */
for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
elt;
elt = TREE_CHAIN (elt), i++)
{
enum machine_mode mode;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
int unsignedp;
tree value = TREE_VALUE (elt);
tree index = TREE_PURPOSE (elt);
rtx xtarget = target;
if (cleared && initializer_zerop (value))
continue;
unsignedp = TYPE_UNSIGNED (elttype);
mode = TYPE_MODE (elttype);
if (mode == BLKmode)
bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
? tree_low_cst (TYPE_SIZE (elttype), 1)
: -1);
else
bitsize = GET_MODE_BITSIZE (mode);
if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
{
tree lo_index = TREE_OPERAND (index, 0);
tree hi_index = TREE_OPERAND (index, 1);
rtx index_r, pos_rtx;
HOST_WIDE_INT lo, hi, count;
tree position;
/* If the range is constant and "small", unroll the loop. */
if (const_bounds_p
&& host_integerp (lo_index, 0)
&& host_integerp (hi_index, 0)
&& (lo = tree_low_cst (lo_index, 0),
hi = tree_low_cst (hi_index, 0),
count = hi - lo + 1,
(!MEM_P (target)
|| count <= 2
|| (host_integerp (TYPE_SIZE (elttype), 1)
&& (tree_low_cst (TYPE_SIZE (elttype), 1) * count
<= 40 * 8)))))
{
lo -= minelt; hi -= minelt;
for (; lo <= hi; lo++)
{
bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
if (MEM_P (target)
&& !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))
{
target = copy_rtx (target);
MEM_KEEP_ALIAS_SET_P (target) = 1;
}
store_constructor_field
(target, bitsize, bitpos, mode, value, type, cleared,
get_alias_set (elttype));
}
}
else
{
rtx loop_start = gen_label_rtx ();
rtx loop_end = gen_label_rtx ();
tree exit_cond;
expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
unsignedp = TYPE_UNSIGNED (domain);
index = build_decl (VAR_DECL, NULL_TREE, domain);
index_r
= gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
&unsignedp, 0));
SET_DECL_RTL (index, index_r);
store_expr (lo_index, index_r, 0);
/* Build the head of the loop. */
do_pending_stack_adjust ();
emit_label (loop_start);
/* Assign value to element index. */
position
= convert (ssizetype,
fold (build2 (MINUS_EXPR, TREE_TYPE (index),
index, TYPE_MIN_VALUE (domain))));
position = size_binop (MULT_EXPR, position,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
xtarget = offset_address (target, pos_rtx,
highest_pow2_factor (position));
xtarget = adjust_address (xtarget, mode, 0);
if (TREE_CODE (value) == CONSTRUCTOR)
store_constructor (value, xtarget, cleared,
bitsize / BITS_PER_UNIT);
else
store_expr (value, xtarget, 0);
/* Generate a conditional jump to exit the loop. */
exit_cond = build2 (LT_EXPR, integer_type_node,
index, hi_index);
jumpif (exit_cond, loop_end);
/* Update the loop counter, and jump to the head of
the loop. */
expand_assignment (index,
build2 (PLUS_EXPR, TREE_TYPE (index),
index, integer_one_node), 0);
emit_jump (loop_start);
/* Build the end of the loop. */
emit_label (loop_end);
}
}
else if ((index != 0 && ! host_integerp (index, 0))
|| ! host_integerp (TYPE_SIZE (elttype), 1))
{
tree position;
if (index == 0)
index = ssize_int (1);
if (minelt)
index = fold_convert (ssizetype,
fold (build2 (MINUS_EXPR,
TREE_TYPE (index),
index,
TYPE_MIN_VALUE (domain))));
position = size_binop (MULT_EXPR, index,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
xtarget = offset_address (target,
expand_expr (position, 0, VOIDmode, 0),
highest_pow2_factor (position));
xtarget = adjust_address (xtarget, mode, 0);
store_expr (value, xtarget, 0);
}
else
{
if (index != 0)
bitpos = ((tree_low_cst (index, 0) - minelt)
* tree_low_cst (TYPE_SIZE (elttype), 1));
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))
{
target = copy_rtx (target);
MEM_KEEP_ALIAS_SET_P (target) = 1;
}
store_constructor_field (target, bitsize, bitpos, mode, value,
type, cleared, get_alias_set (elttype));
}
}
break;
}
if (index != 0) case VECTOR_TYPE:
eltpos = tree_low_cst (index, 1); {
else tree elt;
eltpos = i; int i;
int need_to_clear;
int icode = 0;
tree elttype = TREE_TYPE (type);
int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
enum machine_mode eltmode = TYPE_MODE (elttype);
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
rtx *vector = NULL;
unsigned n_elts;
gcc_assert (eltmode != BLKmode);
n_elts = TYPE_VECTOR_SUBPARTS (type);
if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
{
enum machine_mode mode = GET_MODE (target);
icode = (int) vec_init_optab->handlers[mode].insn_code;
if (icode != CODE_FOR_nothing)
{
unsigned int i;
vector = alloca (n_elts);
for (i = 0; i < n_elts; i++)
vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
}
}
/* If the constructor has fewer elements than the vector,
clear the whole array first. Similarly if this is static
constructor of a non-BLKmode object. */
if (cleared)
need_to_clear = 0;
else if (REG_P (target) && TREE_STATIC (exp))
need_to_clear = 1;
else
{
unsigned HOST_WIDE_INT count = 0, zero_count = 0;
for (elt = CONSTRUCTOR_ELTS (exp);
elt != NULL_TREE;
elt = TREE_CHAIN (elt))
{
int n_elts_here = tree_low_cst
(int_const_binop (TRUNC_DIV_EXPR,
TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
TYPE_SIZE (elttype), 0), 1);
count += n_elts_here;
if (mostly_zeros_p (TREE_VALUE (elt)))
zero_count += n_elts_here;
}
if (vector) /* Clear the entire vector first if there are any missing elements,
{ or if the incidence of zero elements is >= 75%. */
/* Vector CONSTRUCTORs should only be built from smaller need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
vectors in the case of BLKmode vectors. */ }
if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
abort (); if (need_to_clear && size > 0 && !vector)
vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0); {
} if (REG_P (target))
else emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
{ else
enum machine_mode value_mode = clear_storage (target, GEN_INT (size));
TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE cleared = 1;
}
if (!cleared && REG_P (target))
/* Inform later passes that the old value is dead. */
emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into the corresponding
element of TARGET, determined by counting the elements. */
for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
elt;
elt = TREE_CHAIN (elt), i += bitsize / elt_size)
{
tree value = TREE_VALUE (elt);
tree index = TREE_PURPOSE (elt);
HOST_WIDE_INT eltpos;
bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
if (cleared && initializer_zerop (value))
continue;
if (index != 0)
eltpos = tree_low_cst (index, 1);
else
eltpos = i;
if (vector)
{
/* Vector CONSTRUCTORs should only be built from smaller
vectors in the case of BLKmode vectors. */
gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
}
else
{
enum machine_mode value_mode =
TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
? TYPE_MODE (TREE_TYPE (value)) ? TYPE_MODE (TREE_TYPE (value))
: eltmode; : eltmode;
bitpos = eltpos * elt_size; bitpos = eltpos * elt_size;
store_constructor_field (target, bitsize, bitpos, value_mode, value, store_constructor_field (target, bitsize, bitpos,
type, cleared, get_alias_set (elttype)); value_mode, value, type,
} cleared, get_alias_set (elttype));
} }
}
if (vector)
emit_insn (GEN_FCN (icode) (target, if (vector)
gen_rtx_PARALLEL (GET_MODE (target), emit_insn (GEN_FCN (icode)
gen_rtvec_v (n_elts, vector)))); (target,
} gen_rtx_PARALLEL (GET_MODE (target),
gen_rtvec_v (n_elts, vector))));
/* Set constructor assignments. */ break;
else if (TREE_CODE (type) == SET_TYPE) }
{
tree elt = CONSTRUCTOR_ELTS (exp);
unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
tree domain = TYPE_DOMAIN (type);
tree domain_min, domain_max, bitlength;
/* The default implementation strategy is to extract the constant
parts of the constructor, use that to initialize the target,
and then "or" in whatever non-constant ranges we need in addition.
If a large set is all zero or all ones, it is
probably better to set it using memset.
Also, if a large set has just a single range, it may also be
better to first clear all the first clear the set (using
memset), and set the bits we want. */
/* Check for all zeros. */
if (elt == NULL_TREE && size > 0)
{
if (!cleared)
clear_storage (target, GEN_INT (size));
return;
}
domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
bitlength = size_binop (PLUS_EXPR,
size_diffop (domain_max, domain_min),
ssize_int (1));
nbits = tree_low_cst (bitlength, 1);
/* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
are "complicated" (more than one range), initialize (the
constant parts) by copying from a constant. */
if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
|| (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
{
unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
char *bit_buffer = alloca (nbits);
HOST_WIDE_INT word = 0;
unsigned int bit_pos = 0;
unsigned int ibit = 0;
unsigned int offset = 0; /* In bytes from beginning of set. */
elt = get_set_constructor_bits (exp, bit_buffer, nbits);
for (;;)
{
if (bit_buffer[ibit])
{
if (BYTES_BIG_ENDIAN)
word |= (1 << (set_word_size - 1 - bit_pos));
else
word |= 1 << bit_pos;
}
bit_pos++; ibit++;
if (bit_pos >= set_word_size || ibit == nbits)
{
if (word != 0 || ! cleared)
{
rtx datum = gen_int_mode (word, mode);
rtx to_rtx;
/* The assumption here is that it is safe to use
XEXP if the set is multi-word, but not if
it's single-word. */
if (MEM_P (target))
to_rtx = adjust_address (target, mode, offset);
else if (offset == 0)
to_rtx = target;
else
abort ();
emit_move_insn (to_rtx, datum);
}
if (ibit == nbits)
break;
word = 0;
bit_pos = 0;
offset += set_word_size / BITS_PER_UNIT;
}
}
}
else if (!cleared)
/* Don't bother clearing storage if the set is all ones. */
if (TREE_CHAIN (elt) != NULL_TREE
|| (TREE_PURPOSE (elt) == NULL_TREE
? nbits != 1
: ( ! host_integerp (TREE_VALUE (elt), 0)
|| ! host_integerp (TREE_PURPOSE (elt), 0)
|| (tree_low_cst (TREE_VALUE (elt), 0)
- tree_low_cst (TREE_PURPOSE (elt), 0) + 1
!= (HOST_WIDE_INT) nbits))))
clear_storage (target, expr_size (exp));
for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
{
/* Start of range of element or NULL. */
tree startbit = TREE_PURPOSE (elt);
/* End of range of element, or element value. */
tree endbit = TREE_VALUE (elt);
HOST_WIDE_INT startb, endb;
rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
bitlength_rtx = expand_expr (bitlength,
NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
/* Handle non-range tuple element like [ expr ]. */
if (startbit == NULL_TREE)
{
startbit = save_expr (endbit);
endbit = startbit;
}
startbit = convert (sizetype, startbit); /* Set constructor assignments. */
endbit = convert (sizetype, endbit); case SET_TYPE:
if (! integer_zerop (domain_min)) {
{ tree elt = CONSTRUCTOR_ELTS (exp);
startbit = size_binop (MINUS_EXPR, startbit, domain_min); unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
endbit = size_binop (MINUS_EXPR, endbit, domain_min); tree domain = TYPE_DOMAIN (type);
} tree domain_min, domain_max, bitlength;
startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
/* The default implementation strategy is to extract the
constant parts of the constructor, use that to initialize
the target, and then "or" in whatever non-constant ranges
we need in addition.
If a large set is all zero or all ones, it is probably
better to set it using memset. Also, if a large set has
just a single range, it may also be better to first clear
all the first clear the set (using memset), and set the
bits we want. */
/* Check for all zeros. */
if (elt == NULL_TREE && size > 0)
{
if (!cleared)
clear_storage (target, GEN_INT (size));
return;
}
domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
bitlength = size_binop (PLUS_EXPR,
size_diffop (domain_max, domain_min),
ssize_int (1));
nbits = tree_low_cst (bitlength, 1);
/* For "small" sets, or "medium-sized" (up to 32 bytes) sets
that are "complicated" (more than one range), initialize
(the constant parts) by copying from a constant. */
if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
|| (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
{
unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
char *bit_buffer = alloca (nbits);
HOST_WIDE_INT word = 0;
unsigned int bit_pos = 0;
unsigned int ibit = 0;
unsigned int offset = 0; /* In bytes from beginning of set. */
elt = get_set_constructor_bits (exp, bit_buffer, nbits);
for (;;)
{
if (bit_buffer[ibit])
{
if (BYTES_BIG_ENDIAN)
word |= (1 << (set_word_size - 1 - bit_pos));
else
word |= 1 << bit_pos;
}
bit_pos++; ibit++;
if (bit_pos >= set_word_size || ibit == nbits)
{
if (word != 0 || ! cleared)
{
rtx datum = gen_int_mode (word, mode);
rtx to_rtx;
/* The assumption here is that it is safe to
use XEXP if the set is multi-word, but not
if it's single-word. */
if (MEM_P (target))
to_rtx = adjust_address (target, mode, offset);
else
{
gcc_assert (!offset);
to_rtx = target;
}
emit_move_insn (to_rtx, datum);
}
if (ibit == nbits)
break;
word = 0;
bit_pos = 0;
offset += set_word_size / BITS_PER_UNIT;
}
}
}
else if (!cleared)
/* Don't bother clearing storage if the set is all ones. */
if (TREE_CHAIN (elt) != NULL_TREE
|| (TREE_PURPOSE (elt) == NULL_TREE
? nbits != 1
: ( ! host_integerp (TREE_VALUE (elt), 0)
|| ! host_integerp (TREE_PURPOSE (elt), 0)
|| (tree_low_cst (TREE_VALUE (elt), 0)
- tree_low_cst (TREE_PURPOSE (elt), 0) + 1
!= (HOST_WIDE_INT) nbits))))
clear_storage (target, expr_size (exp));
for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
{
/* Start of range of element or NULL. */
tree startbit = TREE_PURPOSE (elt);
/* End of range of element, or element value. */
tree endbit = TREE_VALUE (elt);
HOST_WIDE_INT startb, endb;
rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
bitlength_rtx = expand_expr (bitlength,
NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
/* Handle non-range tuple element like [ expr ]. */
if (startbit == NULL_TREE)
{
startbit = save_expr (endbit);
endbit = startbit;
}
startbit = convert (sizetype, startbit);
endbit = convert (sizetype, endbit);
if (! integer_zerop (domain_min))
{
startbit = size_binop (MINUS_EXPR, startbit, domain_min);
endbit = size_binop (MINUS_EXPR, endbit, domain_min);
}
startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
EXPAND_CONST_ADDRESS);
endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
EXPAND_CONST_ADDRESS); EXPAND_CONST_ADDRESS);
endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
EXPAND_CONST_ADDRESS); if (REG_P (target))
{
if (REG_P (target)) targetx
{ = assign_temp
targetx
= assign_temp
((build_qualified_type (lang_hooks.types.type_for_mode ((build_qualified_type (lang_hooks.types.type_for_mode
(GET_MODE (target), 0), (GET_MODE (target), 0),
TYPE_QUAL_CONST)), TYPE_QUAL_CONST)),
0, 1, 1); 0, 1, 1);
emit_move_insn (targetx, target); emit_move_insn (targetx, target);
} }
else
{
gcc_assert (MEM_P (target));
targetx = target;
}
else if (MEM_P (target)) /* Optimization: If startbit and endbit are constants divisible
targetx = target; by BITS_PER_UNIT, call memset instead. */
else if (TREE_CODE (startbit) == INTEGER_CST
abort (); && TREE_CODE (endbit) == INTEGER_CST
&& (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
/* Optimization: If startbit and endbit are constants divisible && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
by BITS_PER_UNIT, call memset instead. */ {
if (TREE_CODE (startbit) == INTEGER_CST emit_library_call (memset_libfunc, LCT_NORMAL,
&& TREE_CODE (endbit) == INTEGER_CST VOIDmode, 3,
&& (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 plus_constant (XEXP (targetx, 0),
&& (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) startb / BITS_PER_UNIT),
{ Pmode,
emit_library_call (memset_libfunc, LCT_NORMAL, constm1_rtx, TYPE_MODE (integer_type_node),
VOIDmode, 3, GEN_INT ((endb - startb) / BITS_PER_UNIT),
plus_constant (XEXP (targetx, 0), TYPE_MODE (sizetype));
startb / BITS_PER_UNIT), }
Pmode, else
constm1_rtx, TYPE_MODE (integer_type_node), emit_library_call (setbits_libfunc, LCT_NORMAL,
GEN_INT ((endb - startb) / BITS_PER_UNIT), VOIDmode, 4, XEXP (targetx, 0),
TYPE_MODE (sizetype)); Pmode, bitlength_rtx, TYPE_MODE (sizetype),
} startbit_rtx, TYPE_MODE (sizetype),
else endbit_rtx, TYPE_MODE (sizetype));
emit_library_call (setbits_libfunc, LCT_NORMAL,
VOIDmode, 4, XEXP (targetx, 0), if (REG_P (target))
Pmode, bitlength_rtx, TYPE_MODE (sizetype), emit_move_insn (target, targetx);
startbit_rtx, TYPE_MODE (sizetype), }
endbit_rtx, TYPE_MODE (sizetype)); break;
}
if (REG_P (target)) default:
emit_move_insn (target, targetx); gcc_unreachable ();
}
} }
else
abort ();
} }
/* Store the value of EXP (an expression tree) /* Store the value of EXP (an expression tree)
...@@ -5183,8 +5159,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -5183,8 +5159,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
{ {
/* We're storing into a struct containing a single __complex. */ /* We're storing into a struct containing a single __complex. */
if (bitpos != 0) gcc_assert (!bitpos);
abort ();
return store_expr (exp, target, value_mode != VOIDmode); return store_expr (exp, target, value_mode != VOIDmode);
} }
...@@ -5237,9 +5212,8 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, ...@@ -5237,9 +5212,8 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
boundary. If so, we simply do a block copy. */ boundary. If so, we simply do a block copy. */
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode) if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
{ {
if (!MEM_P (target) || !MEM_P (temp) gcc_assert (MEM_P (target) && MEM_P (temp)
|| bitpos % BITS_PER_UNIT != 0) && !(bitpos % BITS_PER_UNIT));
abort ();
target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
emit_block_move (target, temp, emit_block_move (target, temp,
...@@ -5873,7 +5847,7 @@ safe_from_p (rtx x, tree exp, int top_p) ...@@ -5873,7 +5847,7 @@ safe_from_p (rtx x, tree exp, int top_p)
case WITH_CLEANUP_EXPR: case WITH_CLEANUP_EXPR:
case CLEANUP_POINT_EXPR: case CLEANUP_POINT_EXPR:
/* Lowered by gimplify.c. */ /* Lowered by gimplify.c. */
abort (); gcc_unreachable ();
case SAVE_EXPR: case SAVE_EXPR:
return safe_from_p (x, TREE_OPERAND (exp, 0), 0); return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
...@@ -6037,13 +6011,12 @@ expand_var (tree var) ...@@ -6037,13 +6011,12 @@ expand_var (tree var)
expand_decl (var); expand_decl (var);
else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var)) else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
rest_of_decl_compilation (var, 0, 0); rest_of_decl_compilation (var, 0, 0);
else if (TREE_CODE (var) == TYPE_DECL
|| TREE_CODE (var) == CONST_DECL
|| TREE_CODE (var) == FUNCTION_DECL
|| TREE_CODE (var) == LABEL_DECL)
/* No expansion needed. */;
else else
abort (); /* No expansion needed. */
gcc_assert (TREE_CODE (var) == TYPE_DECL
|| TREE_CODE (var) == CONST_DECL
|| TREE_CODE (var) == FUNCTION_DECL
|| TREE_CODE (var) == LABEL_DECL);
} }
} }
...@@ -6142,8 +6115,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, ...@@ -6142,8 +6115,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
/* If the DECL isn't in memory, then the DECL wasn't properly /* If the DECL isn't in memory, then the DECL wasn't properly
marked TREE_ADDRESSABLE, which will be either a front-end marked TREE_ADDRESSABLE, which will be either a front-end
or a tree optimizer bug. */ or a tree optimizer bug. */
if (GET_CODE (result) != MEM) gcc_assert (GET_CODE (result) == MEM);
abort ();
result = XEXP (result, 0); result = XEXP (result, 0);
/* ??? Is this needed anymore? */ /* ??? Is this needed anymore? */
...@@ -6165,8 +6137,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, ...@@ -6165,8 +6137,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
} }
/* We must have made progress. */ /* We must have made progress. */
if (inner == exp) gcc_assert (inner != exp);
abort ();
subtarget = offset || bitpos ? NULL_RTX : target; subtarget = offset || bitpos ? NULL_RTX : target;
result = expand_expr_addr_expr (inner, subtarget, tmode, modifier); result = expand_expr_addr_expr (inner, subtarget, tmode, modifier);
...@@ -6200,8 +6171,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, ...@@ -6200,8 +6171,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
{ {
/* Someone beforehand should have rejected taking the address /* Someone beforehand should have rejected taking the address
of such an object. */ of such an object. */
if (bitpos % BITS_PER_UNIT != 0) gcc_assert (!(bitpos % BITS_PER_UNIT));
abort ();
result = plus_constant (result, bitpos / BITS_PER_UNIT); result = plus_constant (result, bitpos / BITS_PER_UNIT);
if (modifier < EXPAND_SUM) if (modifier < EXPAND_SUM)
...@@ -6479,8 +6449,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -6479,8 +6449,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case FUNCTION_DECL: case FUNCTION_DECL:
case RESULT_DECL: case RESULT_DECL:
if (DECL_RTL (exp) == 0) gcc_assert (DECL_RTL (exp));
abort ();
/* Ensure variable marked as used even if it doesn't go through /* Ensure variable marked as used even if it doesn't go through
a parser. If it hasn't be used yet, write out an external a parser. If it hasn't be used yet, write out an external
...@@ -6497,18 +6466,17 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -6497,18 +6466,17 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
/* Variables inherited from containing functions should have /* Variables inherited from containing functions should have
been lowered by this point. */ been lowered by this point. */
context = decl_function_context (exp); context = decl_function_context (exp);
if (context != 0 gcc_assert (!context
&& context != current_function_decl || context == current_function_decl
&& !TREE_STATIC (exp) || TREE_STATIC (exp)
/* ??? C++ creates functions that are not TREE_STATIC. */ /* ??? C++ creates functions that are not TREE_STATIC. */
&& TREE_CODE (exp) != FUNCTION_DECL) || TREE_CODE (exp) == FUNCTION_DECL);
abort ();
/* This is the case of an array whose size is to be determined /* This is the case of an array whose size is to be determined
from its initializer, while the initializer is still being parsed. from its initializer, while the initializer is still being parsed.
See expand_decl. */ See expand_decl. */
else if (MEM_P (DECL_RTL (exp)) if (MEM_P (DECL_RTL (exp))
&& REG_P (XEXP (DECL_RTL (exp), 0))) && REG_P (XEXP (DECL_RTL (exp), 0)))
temp = validize_mem (DECL_RTL (exp)); temp = validize_mem (DECL_RTL (exp));
...@@ -6548,12 +6516,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -6548,12 +6516,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
if (REG_P (DECL_RTL (exp)) if (REG_P (DECL_RTL (exp))
&& GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp)) && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
{ {
enum machine_mode pmode;
/* Get the signedness used for this variable. Ensure we get the /* Get the signedness used for this variable. Ensure we get the
same mode we got when the variable was declared. */ same mode we got when the variable was declared. */
if (GET_MODE (DECL_RTL (exp)) pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
!= promote_mode (type, DECL_MODE (exp), &unsignedp, (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
(TREE_CODE (exp) == RESULT_DECL ? 1 : 0))) gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
abort ();
temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp)); temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
SUBREG_PROMOTED_VAR_P (temp) = 1; SUBREG_PROMOTED_VAR_P (temp) = 1;
...@@ -6654,8 +6623,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -6654,8 +6623,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
expanders calling save_expr immediately before expanding expanders calling save_expr immediately before expanding
something. Assume this means that we only have to deal something. Assume this means that we only have to deal
with non-BLKmode values. */ with non-BLKmode values. */
if (GET_MODE (ret) == BLKmode) gcc_assert (GET_MODE (ret) != BLKmode);
abort ();
val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp)); val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
DECL_ARTIFICIAL (val) = 1; DECL_ARTIFICIAL (val) = 1;
...@@ -6772,17 +6740,14 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -6772,17 +6740,14 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case ARRAY_REF: case ARRAY_REF:
#ifdef ENABLE_CHECKING
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
abort ();
#endif
{ {
tree array = TREE_OPERAND (exp, 0); tree array = TREE_OPERAND (exp, 0);
tree low_bound = array_ref_low_bound (exp); tree low_bound = array_ref_low_bound (exp);
tree index = convert (sizetype, TREE_OPERAND (exp, 1)); tree index = convert (sizetype, TREE_OPERAND (exp, 1));
HOST_WIDE_INT i; HOST_WIDE_INT i;
gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
/* Optimize the special-case of a zero lower bound. /* Optimize the special-case of a zero lower bound.
We convert the low_bound to sizetype to avoid some problems We convert the low_bound to sizetype to avoid some problems
...@@ -6952,8 +6917,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -6952,8 +6917,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
/* If we got back the original object, something is wrong. Perhaps /* If we got back the original object, something is wrong. Perhaps
we are evaluating an expression too early. In any event, don't we are evaluating an expression too early. In any event, don't
infinitely recurse. */ infinitely recurse. */
if (tem == exp) gcc_assert (tem != exp);
abort ();
/* If TEM's type is a union of variable size, pass TARGET to the inner /* If TEM's type is a union of variable size, pass TARGET to the inner
computation, since it will need a temporary and TARGET is known computation, since it will need a temporary and TARGET is known
...@@ -7007,8 +6971,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7007,8 +6971,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
EXPAND_SUM); EXPAND_SUM);
if (!MEM_P (op0)) gcc_assert (MEM_P (op0));
abort ();
#ifdef POINTERS_EXTEND_UNSIGNED #ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode) if (GET_MODE (offset_rtx) != Pmode)
...@@ -7055,8 +7018,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7055,8 +7018,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
one element arrays having the same mode as its element. */ one element arrays having the same mode as its element. */
if (GET_CODE (op0) == CONCAT) if (GET_CODE (op0) == CONCAT)
{ {
if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0))) gcc_assert (bitpos == 0
abort (); && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
return op0; return op0;
} }
...@@ -7113,10 +7076,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7113,10 +7076,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
/* In this case, BITPOS must start at a byte boundary and /* In this case, BITPOS must start at a byte boundary and
TARGET, if specified, must be a MEM. */ TARGET, if specified, must be a MEM. */
if (!MEM_P (op0) gcc_assert (MEM_P (op0)
|| (target != 0 && !MEM_P (target)) && (!target || MEM_P (target))
|| bitpos % BITS_PER_UNIT != 0) && !(bitpos % BITS_PER_UNIT));
abort ();
emit_block_move (target, emit_block_move (target,
adjust_address (op0, VOIDmode, adjust_address (op0, VOIDmode,
...@@ -7261,17 +7223,19 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7261,17 +7223,19 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
adjust_address (target, TYPE_MODE (valtype), 0), adjust_address (target, TYPE_MODE (valtype), 0),
modifier == EXPAND_STACK_PARM ? 2 : 0); modifier == EXPAND_STACK_PARM ? 2 : 0);
else if (REG_P (target))
/* Store this field into a union of the proper type. */
store_field (target,
MIN ((int_size_in_bytes (TREE_TYPE
(TREE_OPERAND (exp, 0)))
* BITS_PER_UNIT),
(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
VOIDmode, 0, type, 0);
else else
abort (); {
gcc_assert (REG_P (target));
/* Store this field into a union of the proper type. */
store_field (target,
MIN ((int_size_in_bytes (TREE_TYPE
(TREE_OPERAND (exp, 0)))
* BITS_PER_UNIT),
(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
VOIDmode, 0, type, 0);
}
/* Return the entire union. */ /* Return the entire union. */
return target; return target;
...@@ -7347,8 +7311,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7347,8 +7311,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
constants to change mode. */ constants to change mode. */
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
if (TREE_ADDRESSABLE (exp)) gcc_assert (!TREE_ADDRESSABLE (exp));
abort ();
if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type)) if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
target target
...@@ -7381,8 +7344,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7381,8 +7344,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
temp_size, 0, type); temp_size, 0, type);
rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0); rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
if (TREE_ADDRESSABLE (exp)) gcc_assert (!TREE_ADDRESSABLE (exp));
abort ();
if (GET_MODE (op0) == BLKmode) if (GET_MODE (op0) == BLKmode)
emit_block_move (new_with_op0_mode, op0, emit_block_move (new_with_op0_mode, op0,
...@@ -7712,7 +7674,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7712,7 +7674,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case FIX_ROUND_EXPR: case FIX_ROUND_EXPR:
case FIX_FLOOR_EXPR: case FIX_FLOOR_EXPR:
case FIX_CEIL_EXPR: case FIX_CEIL_EXPR:
abort (); /* Not used for C. */ gcc_unreachable (); /* Not used for C. */
case FIX_TRUNC_EXPR: case FIX_TRUNC_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
...@@ -7741,8 +7703,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7741,8 +7703,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
temp = expand_unop (mode, temp = expand_unop (mode,
optab_for_tree_code (NEGATE_EXPR, type), optab_for_tree_code (NEGATE_EXPR, type),
op0, target, 0); op0, target, 0);
if (temp == 0) gcc_assert (temp);
abort ();
return REDUCE_BIT_FIELD (temp); return REDUCE_BIT_FIELD (temp);
case ABS_EXPR: case ABS_EXPR:
...@@ -7751,9 +7712,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7751,9 +7712,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
target = 0; target = 0;
/* ABS_EXPR is not valid for complex arguments. */ /* ABS_EXPR is not valid for complex arguments. */
if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
|| GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT) && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
abort ();
/* Unsigned abs is simply the operand. Testing here means we don't /* Unsigned abs is simply the operand. Testing here means we don't
risk generating incorrect code below. */ risk generating incorrect code below. */
...@@ -7830,8 +7790,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7830,8 +7790,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
if (modifier == EXPAND_STACK_PARM) if (modifier == EXPAND_STACK_PARM)
target = 0; target = 0;
temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
if (temp == 0) gcc_assert (temp);
abort ();
return temp; return temp;
/* ??? Can optimize bitwise operations with one arg constant. /* ??? Can optimize bitwise operations with one arg constant.
...@@ -7968,16 +7927,14 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7968,16 +7927,14 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
only with operands that are always zero or one. */ only with operands that are always zero or one. */
temp = expand_binop (mode, xor_optab, op0, const1_rtx, temp = expand_binop (mode, xor_optab, op0, const1_rtx,
target, 1, OPTAB_LIB_WIDEN); target, 1, OPTAB_LIB_WIDEN);
if (temp == 0) gcc_assert (temp);
abort ();
return temp; return temp;
case STATEMENT_LIST: case STATEMENT_LIST:
{ {
tree_stmt_iterator iter; tree_stmt_iterator iter;
if (!ignore) gcc_assert (ignore);
abort ();
for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter)) for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier); expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
...@@ -7992,11 +7949,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -7992,11 +7949,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
tree then_ = TREE_OPERAND (exp, 1); tree then_ = TREE_OPERAND (exp, 1);
tree else_ = TREE_OPERAND (exp, 2); tree else_ = TREE_OPERAND (exp, 2);
if (TREE_CODE (then_) != GOTO_EXPR gcc_assert (TREE_CODE (then_) == GOTO_EXPR
|| TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
|| TREE_CODE (else_) != GOTO_EXPR && TREE_CODE (else_) == GOTO_EXPR
|| TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL) && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
abort ();
jumpif (pred, label_rtx (GOTO_DESTINATION (then_))); jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
return expand_expr (else_, const0_rtx, VOIDmode, 0); return expand_expr (else_, const0_rtx, VOIDmode, 0);
...@@ -8007,11 +7963,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -8007,11 +7963,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
a temporary variable, so that we can evaluate them here a temporary variable, so that we can evaluate them here
for side effect only. If type is void, we must do likewise. */ for side effect only. If type is void, we must do likewise. */
if (TREE_ADDRESSABLE (type) gcc_assert (!TREE_ADDRESSABLE (type)
|| ignore && !ignore
|| TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
|| TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node) && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
abort ();
/* If we are not to produce a result, we have no target. Otherwise, /* If we are not to produce a result, we have no target. Otherwise,
if a target was specified use it; it will not be used as an if a target was specified use it; it will not be used as an
...@@ -8163,7 +8118,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -8163,7 +8118,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case EH_FILTER_EXPR: case EH_FILTER_EXPR:
case TRY_FINALLY_EXPR: case TRY_FINALLY_EXPR:
/* Lowered by tree-eh.c. */ /* Lowered by tree-eh.c. */
abort (); gcc_unreachable ();
case WITH_CLEANUP_EXPR: case WITH_CLEANUP_EXPR:
case CLEANUP_POINT_EXPR: case CLEANUP_POINT_EXPR:
...@@ -8185,7 +8140,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -8185,7 +8140,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case TRUTH_ANDIF_EXPR: case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR: case TRUTH_ORIF_EXPR:
/* Lowered by gimplify.c. */ /* Lowered by gimplify.c. */
abort (); gcc_unreachable ();
case EXC_PTR_EXPR: case EXC_PTR_EXPR:
return get_exception_pointer (cfun); return get_exception_pointer (cfun);
...@@ -8196,7 +8151,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -8196,7 +8151,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case FDESC_EXPR: case FDESC_EXPR:
/* Function descriptors are not valid except for as /* Function descriptors are not valid except for as
initialization constants, and should not be expanded. */ initialization constants, and should not be expanded. */
abort (); gcc_unreachable ();
case SWITCH_EXPR: case SWITCH_EXPR:
expand_case (exp); expand_case (exp);
...@@ -8232,8 +8187,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, ...@@ -8232,8 +8187,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
target = 0; target = 0;
temp = expand_binop (mode, this_optab, op0, op1, target, temp = expand_binop (mode, this_optab, op0, op1, target,
unsignedp, OPTAB_LIB_WIDEN); unsignedp, OPTAB_LIB_WIDEN);
if (temp == 0) gcc_assert (temp);
abort ();
return REDUCE_BIT_FIELD (temp); return REDUCE_BIT_FIELD (temp);
} }
#undef REDUCE_BIT_FIELD #undef REDUCE_BIT_FIELD
...@@ -8499,7 +8453,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) ...@@ -8499,7 +8453,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
break; break;
default: default:
abort (); gcc_unreachable ();
} }
/* Put a constant second. */ /* Put a constant second. */
...@@ -8595,8 +8549,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) ...@@ -8595,8 +8549,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
code = GET_CODE (result); code = GET_CODE (result);
label = gen_label_rtx (); label = gen_label_rtx ();
if (bcc_gen_fctn[(int) code] == 0) gcc_assert (bcc_gen_fctn[(int) code]);
abort ();
emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
emit_move_insn (target, invert ? const1_rtx : const0_rtx); emit_move_insn (target, invert ? const1_rtx : const0_rtx);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment