Commit 2ca202e7 by Kazu Hirata (committed by Kazu Hirata)

bt-load.c, [...]: Use JUMP_P, LABEL_P, REG_P, MEM_P, NONJUMP_INSN_P, and INSN_P where appropriate.

	* bt-load.c, cfgexpand.c, dwarf2out.c, emit-rtl.c, expr.c,
	function.c, global.c, lcm.c, loop-invariant.c, optabs.c,
	reorg.c, resource.c, tree-ssa-loop-ivopts.c, value-prof.c: Use
	JUMP_P, LABEL_P, REG_P, MEM_P, NONJUMP_INSN_P, and INSN_P
	where appropriate.

From-SVN: r98913
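For context, the predicate macros this patch switches to live in rtl.h. The following is a paraphrased sketch of their definitions in this era of the tree (each is a thin wrapper around one GET_CODE comparison; the header itself is authoritative):

    /* Sketch of the rtl.h predicates used by this patch (paraphrased,
       not verbatim; consult rtl.h for the real definitions).  */
    #define REG_P(X)           (GET_CODE (X) == REG)
    #define MEM_P(X)           (GET_CODE (X) == MEM)
    #define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
    #define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
    #define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
    #define BARRIER_P(X)       (GET_CODE (X) == BARRIER)
    #define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)

    /* A real instruction: a nonjump insn, a jump, or a call.  */
    #define INSN_P(X)  (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X))

Since each predicate is a one-for-one replacement for an open-coded GET_CODE test, the hunks below are mechanical and behavior-preserving; see also the note after the function.c hunk, where a three-way test collapses to a single INSN_P.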
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -7,6 +7,12 @@
 	* tree.h (edge_def): Remove.
 
+	* bt-load.c, cfgexpand.c, dwarf2out.c, emit-rtl.c, expr.c,
+	function.c, global.c, lcm.c, loop-invariant.c, optabs.c,
+	reorg.c, resource.c, tree-ssa-loop-ivopts.c, value-prof.c: Use
+	JUMP_P, LABEL_P, REG_P, MEM_P, NONJUMP_INSN_P, and INSN_P
+	where appropriate.
+
 2005-04-28  Joseph S. Myers  <joseph@codesourcery.com>
 
 	* c-typeck.c (build_compound_expr): Correct logic in last change.
--- a/gcc/bt-load.c
+++ b/gcc/bt-load.c
@@ -1008,7 +1008,7 @@ btr_def_live_range (btr_def def, HARD_REG_SET *btrs_live_in_range)
 def->bb, user->bb,
 (flag_btr_bb_exclusive
 || user->insn != BB_END (def->bb)
-|| GET_CODE (user->insn) != JUMP_INSN));
+|| !JUMP_P (user->insn)));
 }
 else
 {
@@ -1072,7 +1072,7 @@ combine_btr_defs (btr_def def, HARD_REG_SET *btrs_live_in_range)
 def->bb, user->bb,
 (flag_btr_bb_exclusive
 || user->insn != BB_END (def->bb)
-|| GET_CODE (user->insn) != JUMP_INSN));
+|| !JUMP_P (user->insn)));
 btr = choose_btr (combined_btrs_live);
 if (btr != -1)
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -49,15 +49,15 @@ add_reg_br_prob_note (FILE *dump_file, rtx last, int probability)
 if (profile_status == PROFILE_ABSENT)
 return;
 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
-if (GET_CODE (last) == JUMP_INSN)
+if (JUMP_P (last))
 {
 /* It is common to emit condjump-around-jump sequence when we don't know
 how to reverse the conditional.  Special case this.  */
 if (!any_condjump_p (last)
-|| GET_CODE (NEXT_INSN (last)) != JUMP_INSN
+|| !JUMP_P (NEXT_INSN (last))
 || !simplejump_p (NEXT_INSN (last))
-|| GET_CODE (NEXT_INSN (NEXT_INSN (last))) != BARRIER
-|| GET_CODE (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))) != CODE_LABEL
+|| !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
+|| !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
 goto failed;
 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
@@ -67,7 +67,7 @@ add_reg_br_prob_note (FILE *dump_file, rtx last, int probability)
 REG_NOTES (last));
 return;
 }
-if (!last || GET_CODE (last) != JUMP_INSN || !any_condjump_p (last))
+if (!last || !JUMP_P (last) || !any_condjump_p (last))
 goto failed;
 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
 REG_NOTES (last)
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -1482,7 +1482,7 @@ dwarf2out_frame_debug_expr (rtx expr, const char *label)
 src = SET_SRC (expr);
 dest = SET_DEST (expr);
-if (GET_CODE (src) == REG)
+if (REG_P (src))
 {
 rtx rsi = reg_saved_in (src);
 if (rsi)
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -2697,7 +2697,7 @@ get_first_nonnote_insn (void)
 continue;
 else
 {
-if (GET_CODE (insn) == INSN
+if (NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) == SEQUENCE)
 insn = XVECEXP (PATTERN (insn), 0, 0);
 }
@@ -2723,7 +2723,7 @@ get_last_nonnote_insn (void)
 continue;
 else
 {
-if (GET_CODE (insn) == INSN
+if (NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) == SEQUENCE)
 insn = XVECEXP (PATTERN (insn), 0,
 XVECLEN (PATTERN (insn), 0) - 1);
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -2650,7 +2650,7 @@ write_complex_part (rtx cplx, rtx val, bool imag_p)
 the original object if it spans an even number of hard regs.
 This special case is important for SCmode on 64-bit platforms
 where the natural size of floating-point regs is 32-bit.  */
-|| (GET_CODE (cplx) == REG
+|| (REG_P (cplx)
 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
 /* For MEMs we always try to make a "subreg", that is to adjust
@@ -2710,7 +2710,7 @@ read_complex_part (rtx cplx, bool imag_p)
 the original object if it spans an even number of hard regs.
 This special case is important for SCmode on 64-bit platforms
 where the natural size of floating-point regs is 32-bit.  */
-|| (GET_CODE (cplx) == REG
+|| (REG_P (cplx)
 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
 /* For MEMs we always try to make a "subreg", that is to adjust
@@ -6213,7 +6213,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
 /* If the DECL isn't in memory, then the DECL wasn't properly
 marked TREE_ADDRESSABLE, which will be either a front-end
 or a tree optimizer bug.  */
-gcc_assert (GET_CODE (result) == MEM);
+gcc_assert (MEM_P (result));
 result = XEXP (result, 0);
 /* ??? Is this needed anymore? */
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -1244,15 +1244,14 @@ instantiate_virtual_regs (void)
 /* Scan through all the insns, instantiating every virtual register still
 present.  */
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
-if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
-|| GET_CODE (insn) == CALL_INSN)
+if (INSN_P (insn))
 {
 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
 if (INSN_DELETED_P (insn))
 continue;
 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
-if (GET_CODE (insn) == CALL_INSN)
+if (CALL_P (insn))
 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
 NULL_RTX, 0);
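An aside on the function.c hunk just above (not part of the patch itself): the replaced condition tested three codes explicitly, and INSN_P is the disjunction of exactly those three predicates, so the rewrite cannot change which insns are visited:

    /* Illustrative equivalence, assuming the rtl.h sketch near the top:
         GET_CODE (insn) == INSN       <=>  NONJUMP_INSN_P (insn)
         GET_CODE (insn) == JUMP_INSN  <=>  JUMP_P (insn)
         GET_CODE (insn) == CALL_INSN  <=>  CALL_P (insn)
       so the old three-way || chain is precisely INSN_P (insn).  */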
--- a/gcc/global.c
+++ b/gcc/global.c
@@ -2192,7 +2192,7 @@ mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
 basic_block bb = data;
 struct bb_info *bb_info = BB_INFO (bb);
-if (GET_CODE (*x) == REG && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
+if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
 {
 regno = REGNO (*x);
 if (bitmap_bit_p (bb_info->killed, regno)
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -1000,7 +1000,7 @@ create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
 insert the final mode switch before the return value copy
 to its hard register.  */
 if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
-&& GET_CODE ((last_insn = BB_END (src_bb))) == INSN
+&& NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
 && GET_CODE (PATTERN (last_insn)) == USE
 && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
 {
--- a/gcc/loop-invariant.c
+++ b/gcc/loop-invariant.c
@@ -427,7 +427,7 @@ find_invariant_insn (rtx insn, bool always_reached, bool always_executed,
 return;
 dest = SET_DEST (set);
-if (GET_CODE (dest) != REG
+if (!REG_P (dest)
 || HARD_REGISTER_P (dest))
 simple = false;
--- a/gcc/optabs.c
+++ b/gcc/optabs.c
@@ -5554,7 +5554,7 @@ expand_bool_compare_and_swap (rtx mem, rtx old_val, rtx new_val, rtx target)
 /* Ensure that if old_val == mem, that we're not comparing
 against an old value.  */
-if (GET_CODE (old_val) == MEM)
+if (MEM_P (old_val))
 old_val = force_reg (mode, old_val);
 subtarget = expand_val_compare_and_swap_1 (mem, old_val, new_val,
--- a/gcc/reorg.c
+++ b/gcc/reorg.c
@@ -384,7 +384,7 @@ find_end_label (void)
 /* If the basic block reorder pass moves the return insn to
 some other place try to locate it again and put our
 end_of_function_label there.  */
-while (insn && ! (GET_CODE (insn) == JUMP_INSN
+while (insn && ! (JUMP_P (insn)
 && (GET_CODE (PATTERN (insn)) == RETURN)))
 insn = PREV_INSN (insn);
 if (insn)
--- a/gcc/resource.c
+++ b/gcc/resource.c
@@ -834,10 +834,10 @@ mark_set_resources (rtx x, struct resources *res, int in_dest,
 static bool
 return_insn_p (rtx insn)
 {
-if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
+if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
 return true;
-if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
 return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));
 return false;
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -2477,7 +2477,7 @@ computation_cost (tree expr)
 end_sequence ();
 cost = seq_cost (seq);
-if (GET_CODE (rslt) == MEM)
+if (MEM_P (rslt))
 cost += address_cost (XEXP (rslt, 0), TYPE_MODE (type));
 return cost;
--- a/gcc/value-prof.c
+++ b/gcc/value-prof.c
@@ -232,7 +232,7 @@ find_mem_reference_1 (rtx *expr, void *ret)
 {
 rtx *mem = ret;
-if (GET_CODE (*expr) == MEM)
+if (MEM_P (*expr))
 {
 *mem = *expr;
 return 1;
@@ -283,7 +283,7 @@ insn_prefetch_values_to_profile (rtx insn, histogram_values* values)
 histogram_value hist;
 /* It only makes sense to look for memory references in ordinary insns.  */
-if (GET_CODE (insn) != INSN)
+if (!NONJUMP_INSN_P (insn))
 return false;
 if (!find_mem_reference (insn, &mem, &write))