Commit 7b1b4aed by Steven Bosscher, committed by Andreas Jaeger

gcse.c (record_set_info): Use predicates like REG_P.

	* gcse.c (record_set_info): Use predicates like REG_P.
	(mems_conflict_for_gcse_p): Likewise.
	(load_killed_in_block_p): Likewise.
	(hash_expr_1): Likewise.
	(insert_set_in_table): Likewise.
	(gcse_constant_p): Likewise.
	(hash_scan_set): Likewise.
	(hash_scan_insn): Likewise.
	(canon_list_insert): Likewise.
	(record_last_mem_set_info): Likewise.
	(record_last_set_info): Likewise.
	(compute_hash_table_work): Likewise.
	(mark_set): Likewise.
	(mark_clobber): Likewise.
	(mark_oprs_set): Likewise.
	(compute_transp): Likewise.
	(find_avail_set): Likewise.
	(cprop_insn): Likewise.
	(do_local_cprop): Likewise.
	(cprop): Likewise.
	(find_implicit_sets): Likewise.
	(find_bypass_set): Likewise.
	(bypass_conditional_jumps): Likewise.
	(insert_insn_end_bb): Likewise.
	(pre_insert_copy_insn): Likewise.
	(compute_transpout): Likewise.
	(next_ls_expr): Likewise.
	(invalidate_any_buried_refs): Likewise.
	(compute_ld_motion_mems): Likewise.
	(reg_set_info): Likewise.
	(reg_clear_last_set): Likewise.
	(find_moveable_store): Likewise.
	(compute_store_table): Likewise.
	(find_loads): Likewise.
	(store_killed_in_insn): Likewise.
	(insert_insn_start_bb): Likewise.
	(reg_set_between_after_reload_p): Likewise.
	(reg_used_between_after_reload_p): Likewise.
	(is_jump_table_basic_block): Likewise.
	(gcse_after_reload): Likewise.
	(hash_scan_set_after_reload): Likewise.
	(compute_hash_table_after_reload): Likewise.

Co-Authored-By: Andreas Jaeger <aj@suse.de>

From-SVN: r83026
parent ff6ea709
2004-06-12 Steven Bosscher <stevenb@suse.de>,
Andreas Jaeger <aj@suse.de>
* gcse.c (record_set_info): Use predicates like REG_P.
(mems_conflict_for_gcse_p): Likewise.
(load_killed_in_block_p): Likewise.
(hash_expr_1): Likewise.
(insert_set_in_table): Likewise.
(gcse_constant_p): Likewise.
(hash_scan_set): Likewise.
(hash_scan_insn): Likewise.
(canon_list_insert): Likewise.
(record_last_mem_set_info): Likewise.
(record_last_set_info): Likewise.
(compute_hash_table_work): Likewise.
(mark_set): Likewise.
(mark_clobber): Likewise.
(mark_oprs_set): Likewise.
(compute_transp): Likewise.
(find_avail_set): Likewise.
(cprop_insn): Likewise.
(do_local_cprop): Likewise.
(cprop): Likewise.
(find_implicit_sets): Likewise.
(find_bypass_set): Likewise.
(bypass_conditional_jumps): Likewise.
(insert_insn_end_bb): Likewise.
(pre_insert_copy_insn): Likewise.
(compute_transpout): Likewise.
(next_ls_expr): Likewise.
(invalidate_any_buried_refs): Likewise.
(compute_ld_motion_mems): Likewise.
(reg_set_info): Likewise.
(reg_clear_last_set): Likewise.
(find_moveable_store): Likewise.
(compute_store_table): Likewise.
(find_loads): Likewise.
(store_killed_in_insn): Likewise.
(insert_insn_start_bb): Likewise.
(reg_set_between_after_reload_p): Likewise.
(reg_used_between_after_reload_p): Likewise.
(is_jump_table_basic_block): Likewise.
(gcse_after_reload): Likewise.
(hash_scan_set_after_reload): Likewise.
(compute_hash_table_after_reload): Likewise.
2004-06-12 Steven Bosscher <stevenb@suse.de>
* rtl.h (MEM_P, NONJUMP_INSN_P, CALL_INSN_P): New predicates.
......
...@@ -232,8 +232,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA ...@@ -232,8 +232,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
substitutions. substitutions.
PRE is quite expensive in complicated functions because the DFA can take PRE is quite expensive in complicated functions because the DFA can take
awhile to converge. Hence we only perform one pass. The parameter max-gcse-passes can a while to converge. Hence we only perform one pass. The parameter
be modified if one wants to experiment. max-gcse-passes can be modified if one wants to experiment.
********************** **********************
...@@ -288,7 +288,6 @@ static FILE *gcse_file; ...@@ -288,7 +288,6 @@ static FILE *gcse_file;
* If we changed any jumps via cprop. * If we changed any jumps via cprop.
* If we added any labels via edge splitting. */ * If we added any labels via edge splitting. */
static int run_jump_opt_after_gcse; static int run_jump_opt_after_gcse;
/* Bitmaps are normally not included in debugging dumps. /* Bitmaps are normally not included in debugging dumps.
...@@ -707,7 +706,7 @@ gcse_main (rtx f, FILE *file) ...@@ -707,7 +706,7 @@ gcse_main (rtx f, FILE *file)
/* Return if there's nothing to do, or it is too expensive. */ /* Return if there's nothing to do, or it is too expensive. */
if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled"))) if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
return 0; return 0;
gcc_obstack_init (&gcse_obstack); gcc_obstack_init (&gcse_obstack);
bytes_used = 0; bytes_used = 0;
...@@ -822,6 +821,7 @@ gcse_main (rtx f, FILE *file) ...@@ -822,6 +821,7 @@ gcse_main (rtx f, FILE *file)
obstack_free (&gcse_obstack, NULL); obstack_free (&gcse_obstack, NULL);
free_reg_set_mem (); free_reg_set_mem ();
/* We are finished with alias. */ /* We are finished with alias. */
end_alias_analysis (); end_alias_analysis ();
allocate_reg_info (max_reg_num (), FALSE, FALSE); allocate_reg_info (max_reg_num (), FALSE, FALSE);
...@@ -1013,7 +1013,8 @@ free_gcse_mem (void) ...@@ -1013,7 +1013,8 @@ free_gcse_mem (void)
ABSALTERED. */ ABSALTERED. */
static void static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc, struct hash_table *table) compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
struct hash_table *table)
{ {
unsigned int i; unsigned int i;
...@@ -1154,7 +1155,7 @@ record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data) ...@@ -1154,7 +1155,7 @@ record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{ {
rtx record_set_insn = (rtx) data; rtx record_set_insn = (rtx) data;
if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER) if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
record_one_set (REGNO (dest), record_set_insn); record_one_set (REGNO (dest), record_set_insn);
} }
...@@ -1355,7 +1356,7 @@ mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED, ...@@ -1355,7 +1356,7 @@ mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
/* If DEST is not a MEM, then it will not conflict with the load. Note /* If DEST is not a MEM, then it will not conflict with the load. Note
that function calls are assumed to clobber memory, but are handled that function calls are assumed to clobber memory, but are handled
elsewhere. */ elsewhere. */
if (GET_CODE (dest) != MEM) if (! MEM_P (dest))
return; return;
/* If we are setting a MEM in our list of specially recognized MEMs, /* If we are setting a MEM in our list of specially recognized MEMs,
...@@ -1403,7 +1404,7 @@ load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p) ...@@ -1403,7 +1404,7 @@ load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
/* If SETTER is a call everything is clobbered. Note that calls /* If SETTER is a call everything is clobbered. Note that calls
to pure functions are never put on the list, so we need not to pure functions are never put on the list, so we need not
worry about them. */ worry about them. */
if (GET_CODE (setter) == CALL_INSN) if (CALL_P (setter))
return 1; return 1;
/* SETTER must be an INSN of some kind that sets memory. Call /* SETTER must be an INSN of some kind that sets memory. Call
...@@ -1485,14 +1486,14 @@ hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p) ...@@ -1485,14 +1486,14 @@ hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
enum rtx_code code; enum rtx_code code;
const char *fmt; const char *fmt;
/* Used to turn recursion into iteration. We can't rely on GCC's
tail-recursion elimination since we need to keep accumulating values
in HASH. */
if (x == 0) if (x == 0)
return hash; return hash;
/* Used to turn recursion into iteration. We can't rely on GCC's
tail-recursion elimination since we need to keep accumulating values
in HASH. */
repeat: repeat:
code = GET_CODE (x); code = GET_CODE (x);
switch (code) switch (code)
{ {
...@@ -1964,7 +1965,7 @@ insert_set_in_table (rtx x, rtx insn, struct hash_table *table) ...@@ -1964,7 +1965,7 @@ insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
struct occr *cur_occr, *last_occr = NULL; struct occr *cur_occr, *last_occr = NULL;
if (GET_CODE (x) != SET if (GET_CODE (x) != SET
|| GET_CODE (SET_DEST (x)) != REG) || ! REG_P (SET_DEST (x)))
abort (); abort ();
hash = hash_set (REGNO (SET_DEST (x)), table->size); hash = hash_set (REGNO (SET_DEST (x)), table->size);
...@@ -2048,12 +2049,10 @@ gcse_constant_p (rtx x) ...@@ -2048,12 +2049,10 @@ gcse_constant_p (rtx x)
&& GET_CODE (XEXP (x, 1)) == CONST_INT) && GET_CODE (XEXP (x, 1)) == CONST_INT)
return true; return true;
/* Consider a COMPARE of the same registers is a constant /* Consider a COMPARE of the same registers is a constant
if they are not floating point registers. */ if they are not floating point registers. */
if (GET_CODE(x) == COMPARE if (GET_CODE(x) == COMPARE
&& GET_CODE (XEXP (x, 0)) == REG && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
&& GET_CODE (XEXP (x, 1)) == REG
&& REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1)) && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
&& ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))) && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
&& ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1)))) && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
...@@ -2072,10 +2071,10 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table) ...@@ -2072,10 +2071,10 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
rtx dest = SET_DEST (pat); rtx dest = SET_DEST (pat);
rtx note; rtx note;
if (GET_CODE (src) == CALL) if (CALL_P (src))
hash_scan_call (src, insn, table); hash_scan_call (src, insn, table);
else if (GET_CODE (dest) == REG) else if (REG_P (dest))
{ {
unsigned int regno = REGNO (dest); unsigned int regno = REGNO (dest);
rtx tmp; rtx tmp;
...@@ -2105,7 +2104,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table) ...@@ -2105,7 +2104,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
explicitly, it means address of parameter has been taken, explicitly, it means address of parameter has been taken,
so we should not extend the lifetime of the pseudo. */ so we should not extend the lifetime of the pseudo. */
&& ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
|| GET_CODE (XEXP (note, 0)) != MEM)) || ! MEM_P (XEXP (note, 0))))
{ {
/* An expression is not anticipatable if its operands are /* An expression is not anticipatable if its operands are
modified before this insn or if this is not the only SET in modified before this insn or if this is not the only SET in
...@@ -2124,7 +2123,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table) ...@@ -2124,7 +2123,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
/* Record sets for constant/copy propagation. */ /* Record sets for constant/copy propagation. */
else if (table->set_p else if (table->set_p
&& regno >= FIRST_PSEUDO_REGISTER && regno >= FIRST_PSEUDO_REGISTER
&& ((GET_CODE (src) == REG && ((REG_P (src)
&& REGNO (src) >= FIRST_PSEUDO_REGISTER && REGNO (src) >= FIRST_PSEUDO_REGISTER
&& can_copy_p (GET_MODE (dest)) && can_copy_p (GET_MODE (dest))
&& REGNO (src) != regno) && REGNO (src) != regno)
...@@ -2140,7 +2139,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table) ...@@ -2140,7 +2139,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
/* In case of store we want to consider the memory value as available in /* In case of store we want to consider the memory value as available in
the REG stored in that memory. This makes it possible to remove the REG stored in that memory. This makes it possible to remove
redundant loads from due to stores to the same location. */ redundant loads from due to stores to the same location. */
else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM) else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
{ {
unsigned int regno = REGNO (src); unsigned int regno = REGNO (src);
...@@ -2164,7 +2163,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table) ...@@ -2164,7 +2163,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
explicitly, it means address of parameter has been taken, explicitly, it means address of parameter has been taken,
so we should not extend the lifetime of the pseudo. */ so we should not extend the lifetime of the pseudo. */
&& ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
|| GET_CODE (XEXP (note, 0)) != MEM)) || ! MEM_P (XEXP (note, 0))))
{ {
/* Stores are never anticipatable. */ /* Stores are never anticipatable. */
int antic_p = 0; int antic_p = 0;
...@@ -2232,13 +2231,13 @@ hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block) ...@@ -2232,13 +2231,13 @@ hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
hash_scan_set (x, insn, table); hash_scan_set (x, insn, table);
else if (GET_CODE (x) == CLOBBER) else if (GET_CODE (x) == CLOBBER)
hash_scan_clobber (x, insn, table); hash_scan_clobber (x, insn, table);
else if (GET_CODE (x) == CALL) else if (CALL_P (x))
hash_scan_call (x, insn, table); hash_scan_call (x, insn, table);
} }
else if (GET_CODE (pat) == CLOBBER) else if (GET_CODE (pat) == CLOBBER)
hash_scan_clobber (pat, insn, table); hash_scan_clobber (pat, insn, table);
else if (GET_CODE (pat) == CALL) else if (CALL_P (pat))
hash_scan_call (pat, insn, table); hash_scan_call (pat, insn, table);
} }
...@@ -2331,7 +2330,7 @@ canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED, ...@@ -2331,7 +2330,7 @@ canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
that function calls are assumed to clobber memory, but are handled that function calls are assumed to clobber memory, but are handled
elsewhere. */ elsewhere. */
if (GET_CODE (dest) != MEM) if (! MEM_P (dest))
return; return;
dest_addr = get_addr (XEXP (dest, 0)); dest_addr = get_addr (XEXP (dest, 0));
...@@ -2360,7 +2359,7 @@ record_last_mem_set_info (rtx insn) ...@@ -2360,7 +2359,7 @@ record_last_mem_set_info (rtx insn)
modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]); modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
bitmap_set_bit (modify_mem_list_set, bb); bitmap_set_bit (modify_mem_list_set, bb);
if (GET_CODE (insn) == CALL_INSN) if (CALL_P (insn))
{ {
/* Note that traversals of this loop (other than for free-ing) /* Note that traversals of this loop (other than for free-ing)
will break after encountering a CALL_INSN. So, there's no will break after encountering a CALL_INSN. So, there's no
...@@ -2385,9 +2384,9 @@ record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data) ...@@ -2385,9 +2384,9 @@ record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
if (GET_CODE (dest) == SUBREG) if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest); dest = SUBREG_REG (dest);
if (GET_CODE (dest) == REG) if (REG_P (dest))
record_last_reg_set_info (last_set_insn, REGNO (dest)); record_last_reg_set_info (last_set_insn, REGNO (dest));
else if (GET_CODE (dest) == MEM else if (MEM_P (dest)
/* Ignore pushes, they clobber nothing. */ /* Ignore pushes, they clobber nothing. */
&& ! push_operand (dest, GET_MODE (dest))) && ! push_operand (dest, GET_MODE (dest)))
record_last_mem_set_info (last_set_insn); record_last_mem_set_info (last_set_insn);
...@@ -2446,7 +2445,7 @@ compute_hash_table_work (struct hash_table *table) ...@@ -2446,7 +2445,7 @@ compute_hash_table_work (struct hash_table *table)
if (! INSN_P (insn)) if (! INSN_P (insn))
continue; continue;
if (GET_CODE (insn) == CALL_INSN) if (CALL_P (insn))
{ {
bool clobbers_all = false; bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP #ifdef NON_SAVING_SETJMP
...@@ -2741,12 +2740,12 @@ mark_set (rtx pat, rtx insn) ...@@ -2741,12 +2740,12 @@ mark_set (rtx pat, rtx insn)
|| GET_CODE (dest) == STRICT_LOW_PART) || GET_CODE (dest) == STRICT_LOW_PART)
dest = XEXP (dest, 0); dest = XEXP (dest, 0);
if (GET_CODE (dest) == REG) if (REG_P (dest))
SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest)); SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
else if (GET_CODE (dest) == MEM) else if (MEM_P (dest))
record_last_mem_set_info (insn); record_last_mem_set_info (insn);
if (GET_CODE (SET_SRC (pat)) == CALL) if (CALL_P (SET_SRC (pat)))
mark_call (insn); mark_call (insn);
} }
...@@ -2760,7 +2759,7 @@ mark_clobber (rtx pat, rtx insn) ...@@ -2760,7 +2759,7 @@ mark_clobber (rtx pat, rtx insn)
while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART) while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
clob = XEXP (clob, 0); clob = XEXP (clob, 0);
if (GET_CODE (clob) == REG) if (REG_P (clob))
SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob)); SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
else else
record_last_mem_set_info (insn); record_last_mem_set_info (insn);
...@@ -2786,13 +2785,13 @@ mark_oprs_set (rtx insn) ...@@ -2786,13 +2785,13 @@ mark_oprs_set (rtx insn)
mark_set (x, insn); mark_set (x, insn);
else if (GET_CODE (x) == CLOBBER) else if (GET_CODE (x) == CLOBBER)
mark_clobber (x, insn); mark_clobber (x, insn);
else if (GET_CODE (x) == CALL) else if (CALL_P (x))
mark_call (insn); mark_call (insn);
} }
else if (GET_CODE (pat) == CLOBBER) else if (GET_CODE (pat) == CLOBBER)
mark_clobber (pat, insn); mark_clobber (pat, insn);
else if (GET_CODE (pat) == CALL) else if (CALL_P (pat))
mark_call (insn); mark_call (insn);
} }
...@@ -2897,7 +2896,7 @@ compute_transp (rtx x, int indx, sbitmap *bmap, int set_p) ...@@ -2897,7 +2896,7 @@ compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
{ {
rtx dest, dest_addr; rtx dest, dest_addr;
if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN) if (CALL_P (XEXP (list_entry, 0)))
{ {
if (set_p) if (set_p)
SET_BIT (bmap[bb->index], indx); SET_BIT (bmap[bb->index], indx);
...@@ -3156,7 +3155,7 @@ find_avail_set (int regno, rtx insn) ...@@ -3156,7 +3155,7 @@ find_avail_set (int regno, rtx insn)
/* If the source of the set is anything except a register, then /* If the source of the set is anything except a register, then
we have reached the end of the copy chain. */ we have reached the end of the copy chain. */
if (GET_CODE (src) != REG) if (! REG_P (src))
break; break;
/* Follow the copy chain, ie start another iteration of the loop /* Follow the copy chain, ie start another iteration of the loop
...@@ -3379,7 +3378,7 @@ cprop_insn (rtx insn, int alter_jumps) ...@@ -3379,7 +3378,7 @@ cprop_insn (rtx insn, int alter_jumps)
return 1; return 1;
} }
} }
else if (GET_CODE (src) == REG else if (REG_P (src)
&& REGNO (src) >= FIRST_PSEUDO_REGISTER && REGNO (src) >= FIRST_PSEUDO_REGISTER
&& REGNO (src) != regno) && REGNO (src) != regno)
{ {
...@@ -3461,7 +3460,7 @@ do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp) ...@@ -3461,7 +3460,7 @@ do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
/* Rule out USE instructions and ASM statements as we don't want to /* Rule out USE instructions and ASM statements as we don't want to
change the hard registers mentioned. */ change the hard registers mentioned. */
if (GET_CODE (x) == REG if (REG_P (x)
&& (REGNO (x) >= FIRST_PSEUDO_REGISTER && (REGNO (x) >= FIRST_PSEUDO_REGISTER
|| (GET_CODE (PATTERN (insn)) != USE || (GET_CODE (PATTERN (insn)) != USE
&& asm_noperands (PATTERN (insn)) < 0))) && asm_noperands (PATTERN (insn)) < 0)))
...@@ -3488,7 +3487,7 @@ do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp) ...@@ -3488,7 +3487,7 @@ do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
explicitly, it means address of parameter has been taken, explicitly, it means address of parameter has been taken,
so we should not extend the lifetime of the pseudo. */ so we should not extend the lifetime of the pseudo. */
&& (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX)) && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
|| GET_CODE (XEXP (note, 0)) != MEM)) || ! MEM_P (XEXP (note, 0))))
newreg = this_rtx; newreg = this_rtx;
} }
if (newcnst && constprop_register (insn, x, newcnst, alter_jumps)) if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
...@@ -3664,7 +3663,7 @@ cprop (int alter_jumps) ...@@ -3664,7 +3663,7 @@ cprop (int alter_jumps)
/* Keep track of everything modified by this insn. */ /* Keep track of everything modified by this insn. */
/* ??? Need to be careful w.r.t. mods done to INSN. Don't /* ??? Need to be careful w.r.t. mods done to INSN. Don't
call mark_oprs_set if we turned the insn into a NOTE. */ call mark_oprs_set if we turned the insn into a NOTE. */
if (GET_CODE (insn) != NOTE) if (! NOTE_P (insn))
mark_oprs_set (insn); mark_oprs_set (insn);
} }
} }
...@@ -3791,7 +3790,7 @@ find_implicit_sets (void) ...@@ -3791,7 +3790,7 @@ find_implicit_sets (void)
if (cond if (cond
&& (GET_CODE (cond) == EQ || GET_CODE (cond) == NE) && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
&& GET_CODE (XEXP (cond, 0)) == REG && REG_P (XEXP (cond, 0))
&& REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
&& implicit_set_cond_p (cond)) && implicit_set_cond_p (cond))
{ {
...@@ -3913,7 +3912,7 @@ find_bypass_set (int regno, int bb) ...@@ -3913,7 +3912,7 @@ find_bypass_set (int regno, int bb)
if (gcse_constant_p (src)) if (gcse_constant_p (src))
result = set; result = set;
if (GET_CODE (src) != REG) if (! REG_P (src))
break; break;
regno = REGNO (src); regno = REGNO (src);
...@@ -4050,7 +4049,7 @@ bypass_block (basic_block bb, rtx setcc, rtx jump) ...@@ -4050,7 +4049,7 @@ bypass_block (basic_block bb, rtx setcc, rtx jump)
/* Avoid unification of the edge with other edges from original /* Avoid unification of the edge with other edges from original
branch. We would end up emitting the instruction on "both" branch. We would end up emitting the instruction on "both"
edges. */ edges. */
if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))) if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
{ {
edge e2; edge e2;
...@@ -4141,7 +4140,7 @@ bypass_conditional_jumps (void) ...@@ -4141,7 +4140,7 @@ bypass_conditional_jumps (void)
else else
break; break;
} }
else if (GET_CODE (insn) == JUMP_INSN) else if (JUMP_P (insn))
{ {
if ((any_condjump_p (insn) || computed_jump_p (insn)) if ((any_condjump_p (insn) || computed_jump_p (insn))
&& onlyjump_p (insn)) && onlyjump_p (insn))
...@@ -4427,7 +4426,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre) ...@@ -4427,7 +4426,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
handle cc0, etc. properly]. Similarly we need to care trapping handle cc0, etc. properly]. Similarly we need to care trapping
instructions in presence of non-call exceptions. */ instructions in presence of non-call exceptions. */
if (GET_CODE (insn) == JUMP_INSN if (JUMP_P (insn)
|| (GET_CODE (insn) == INSN || (GET_CODE (insn) == INSN
&& (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))) && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
{ {
...@@ -4470,7 +4469,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre) ...@@ -4470,7 +4469,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
/* Likewise if the last insn is a call, as will happen in the presence /* Likewise if the last insn is a call, as will happen in the presence
of exception handling. */ of exception handling. */
else if (GET_CODE (insn) == CALL_INSN else if (CALL_P (insn)
&& (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))) && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
{ {
/* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers, /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
...@@ -4500,7 +4499,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre) ...@@ -4500,7 +4499,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
If we inserted before the CODE_LABEL, then we would be putting If we inserted before the CODE_LABEL, then we would be putting
the insn in the wrong basic block. In that case, put the insn the insn in the wrong basic block. In that case, put the insn
after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */ after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
while (GET_CODE (insn) == CODE_LABEL while (LABEL_P (insn)
|| NOTE_INSN_BASIC_BLOCK_P (insn)) || NOTE_INSN_BASIC_BLOCK_P (insn))
insn = NEXT_INSN (insn); insn = NEXT_INSN (insn);
...@@ -4663,7 +4662,7 @@ pre_insert_copy_insn (struct expr *expr, rtx insn) ...@@ -4663,7 +4662,7 @@ pre_insert_copy_insn (struct expr *expr, rtx insn)
else else
abort (); abort ();
if (GET_CODE (SET_DEST (set)) == REG) if (REG_P (SET_DEST (set)))
{ {
old_reg = SET_DEST (set); old_reg = SET_DEST (set);
/* Check if we can modify the set destination in the original insn. */ /* Check if we can modify the set destination in the original insn. */
...@@ -4736,9 +4735,9 @@ pre_insert_copies (void) ...@@ -4736,9 +4735,9 @@ pre_insert_copies (void)
expression wasn't deleted anywhere. */ expression wasn't deleted anywhere. */
if (expr->reaching_reg == NULL) if (expr->reaching_reg == NULL)
continue; continue;
/* Set when we add a copy for that expression. */ /* Set when we add a copy for that expression. */
added_copy = 0; added_copy = 0;
for (occr = expr->antic_occr; occr != NULL; occr = occr->next) for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
{ {
...@@ -4771,7 +4770,7 @@ pre_insert_copies (void) ...@@ -4771,7 +4770,7 @@ pre_insert_copies (void)
} }
} }
if (added_copy) if (added_copy)
update_ld_motion_stores (expr); update_ld_motion_stores (expr);
} }
} }
...@@ -5050,12 +5049,12 @@ compute_transpout (void) ...@@ -5050,12 +5049,12 @@ compute_transpout (void)
/* Note that flow inserted a nop a the end of basic blocks that /* Note that flow inserted a nop a the end of basic blocks that
end in call instructions for reasons other than abnormal end in call instructions for reasons other than abnormal
control flow. */ control flow. */
if (GET_CODE (BB_END (bb)) != CALL_INSN) if (! CALL_P (BB_END (bb)))
continue; continue;
for (i = 0; i < expr_hash_table.size; i++) for (i = 0; i < expr_hash_table.size; i++)
for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash) for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
if (GET_CODE (expr->expr) == MEM) if (MEM_P (expr->expr))
{ {
if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0))) && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
...@@ -5591,7 +5590,7 @@ next_ls_expr (struct ls_expr * ptr) ...@@ -5591,7 +5590,7 @@ next_ls_expr (struct ls_expr * ptr)
static int static int
simple_mem (rtx x) simple_mem (rtx x)
{ {
if (GET_CODE (x) != MEM) if (! MEM_P (x))
return 0; return 0;
if (MEM_VOLATILE_P (x)) if (MEM_VOLATILE_P (x))
...@@ -5635,7 +5634,7 @@ invalidate_any_buried_refs (rtx x) ...@@ -5635,7 +5634,7 @@ invalidate_any_buried_refs (rtx x)
struct ls_expr * ptr; struct ls_expr * ptr;
/* Invalidate it in the list. */ /* Invalidate it in the list. */
if (GET_CODE (x) == MEM && simple_mem (x)) if (MEM_P (x) && simple_mem (x))
{ {
ptr = ldst_entry (x); ptr = ldst_entry (x);
ptr->invalid = 1; ptr->invalid = 1;
...@@ -5685,10 +5684,10 @@ compute_ld_motion_mems (void) ...@@ -5685,10 +5684,10 @@ compute_ld_motion_mems (void)
rtx dest = SET_DEST (PATTERN (insn)); rtx dest = SET_DEST (PATTERN (insn));
/* Check for a simple LOAD... */ /* Check for a simple LOAD... */
if (GET_CODE (src) == MEM && simple_mem (src)) if (MEM_P (src) && simple_mem (src))
{ {
ptr = ldst_entry (src); ptr = ldst_entry (src);
if (GET_CODE (dest) == REG) if (REG_P (dest))
ptr->loads = alloc_INSN_LIST (insn, ptr->loads); ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
else else
ptr->invalid = 1; ptr->invalid = 1;
...@@ -5703,11 +5702,11 @@ compute_ld_motion_mems (void) ...@@ -5703,11 +5702,11 @@ compute_ld_motion_mems (void)
will block any movement we might do later. We only care will block any movement we might do later. We only care
about this exact pattern since those are the only about this exact pattern since those are the only
circumstance that we will ignore the aliasing info. */ circumstance that we will ignore the aliasing info. */
if (GET_CODE (dest) == MEM && simple_mem (dest)) if (MEM_P (dest) && simple_mem (dest))
{ {
ptr = ldst_entry (dest); ptr = ldst_entry (dest);
if (GET_CODE (src) != MEM if (! MEM_P (src)
&& GET_CODE (src) != ASM_OPERANDS && GET_CODE (src) != ASM_OPERANDS
/* Check for REG manually since want_to_gcse_p /* Check for REG manually since want_to_gcse_p
returns 0 for all REGs. */ returns 0 for all REGs. */
...@@ -5860,7 +5859,7 @@ reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, ...@@ -5860,7 +5859,7 @@ reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
if (GET_CODE (dest) == SUBREG) if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest); dest = SUBREG_REG (dest);
if (GET_CODE (dest) == REG) if (REG_P (dest))
{ {
regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn); regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
if (bb_reg) if (bb_reg)
...@@ -5880,7 +5879,7 @@ reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED, ...@@ -5880,7 +5879,7 @@ reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
if (GET_CODE (dest) == SUBREG) if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest); dest = SUBREG_REG (dest);
if (GET_CODE (dest) == REG && if (REG_P (dest) &&
dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn)) dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
dead_vec[REGNO (dest)] = 0; dead_vec[REGNO (dest)] = 0;
} }
...@@ -6025,7 +6024,7 @@ find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after) ...@@ -6025,7 +6024,7 @@ find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
dest = SET_DEST (set); dest = SET_DEST (set);
if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest) if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
|| GET_MODE (dest) == BLKmode) || GET_MODE (dest) == BLKmode)
return; return;
...@@ -6137,7 +6136,7 @@ compute_store_table (void) ...@@ -6137,7 +6136,7 @@ compute_store_table (void)
if (! INSN_P (insn)) if (! INSN_P (insn))
continue; continue;
if (GET_CODE (insn) == CALL_INSN) if (CALL_P (insn))
{ {
bool clobbers_all = false; bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP #ifdef NON_SAVING_SETJMP
...@@ -6170,7 +6169,7 @@ compute_store_table (void) ...@@ -6170,7 +6169,7 @@ compute_store_table (void)
if (! INSN_P (insn)) if (! INSN_P (insn))
continue; continue;
if (GET_CODE (insn) == CALL_INSN) if (CALL_P (insn))
{ {
bool clobbers_all = false; bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP #ifdef NON_SAVING_SETJMP
...@@ -6194,7 +6193,7 @@ compute_store_table (void) ...@@ -6194,7 +6193,7 @@ compute_store_table (void)
/* Unmark regs that are no longer set. */ /* Unmark regs that are no longer set. */
compute_store_table_current_insn = insn; compute_store_table_current_insn = insn;
note_stores (pat, reg_clear_last_set, last_set_in); note_stores (pat, reg_clear_last_set, last_set_in);
if (GET_CODE (insn) == CALL_INSN) if (CALL_P (insn))
{ {
bool clobbers_all = false; bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP #ifdef NON_SAVING_SETJMP
...@@ -6288,7 +6287,7 @@ find_loads (rtx x, rtx store_pattern, int after) ...@@ -6288,7 +6287,7 @@ find_loads (rtx x, rtx store_pattern, int after)
if (GET_CODE (x) == SET) if (GET_CODE (x) == SET)
x = SET_SRC (x); x = SET_SRC (x);
if (GET_CODE (x) == MEM) if (MEM_P (x))
{ {
if (load_kills_store (x, store_pattern, after)) if (load_kills_store (x, store_pattern, after))
return true; return true;
...@@ -6320,7 +6319,7 @@ store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after) ...@@ -6320,7 +6319,7 @@ store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
if (!INSN_P (insn)) if (!INSN_P (insn))
return false; return false;
if (GET_CODE (insn) == CALL_INSN) if (CALL_P (insn))
{ {
/* A normal or pure call might read from pattern, /* A normal or pure call might read from pattern,
but a const call will not. */ but a const call will not. */
...@@ -6352,7 +6351,7 @@ store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after) ...@@ -6352,7 +6351,7 @@ store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
dest = XEXP (dest, 0); dest = XEXP (dest, 0);
/* Check for memory stores to aliased objects. */ /* Check for memory stores to aliased objects. */
if (GET_CODE (dest) == MEM if (MEM_P (dest)
&& !expr_equiv_p (dest, x)) && !expr_equiv_p (dest, x))
{ {
if (after) if (after)
...@@ -6537,8 +6536,8 @@ insert_insn_start_bb (rtx insn, basic_block bb) ...@@ -6537,8 +6536,8 @@ insert_insn_start_bb (rtx insn, basic_block bb)
rtx before = BB_HEAD (bb); rtx before = BB_HEAD (bb);
while (before != 0) while (before != 0)
{ {
if (GET_CODE (before) != CODE_LABEL if (! LABEL_P (before)
&& (GET_CODE (before) != NOTE && (! NOTE_P (before)
|| NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK)) || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
break; break;
prev = before; prev = before;
...@@ -6658,7 +6657,7 @@ remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr) ...@@ -6658,7 +6657,7 @@ remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
act = stack[--stack_top]; act = stack[--stack_top];
} }
bb = act->dest; bb = act->dest;
if (bb == EXIT_BLOCK_PTR if (bb == EXIT_BLOCK_PTR
|| TEST_BIT (visited, bb->index)) || TEST_BIT (visited, bb->index))
{ {
...@@ -6677,7 +6676,7 @@ remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr) ...@@ -6677,7 +6676,7 @@ remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
} }
else else
last = NEXT_INSN (BB_END (bb)); last = NEXT_INSN (BB_END (bb));
for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn)) for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
if (INSN_P (insn)) if (INSN_P (insn))
{ {
...@@ -6940,7 +6939,7 @@ is_too_expensive (const char *pass) ...@@ -6940,7 +6939,7 @@ is_too_expensive (const char *pass)
/* Trying to perform global optimizations on flow graphs which have /* Trying to perform global optimizations on flow graphs which have
a high connectivity will take a long time and is unlikely to be a high connectivity will take a long time and is unlikely to be
particularly useful. particularly useful.
In normal circumstances a cfg should have about twice as many In normal circumstances a cfg should have about twice as many
edges as blocks. But we do not want to punish small functions edges as blocks. But we do not want to punish small functions
which have a couple switch statements. Rather than simply which have a couple switch statements. Rather than simply
...@@ -6951,7 +6950,7 @@ is_too_expensive (const char *pass) ...@@ -6951,7 +6950,7 @@ is_too_expensive (const char *pass)
if (warn_disabled_optimization) if (warn_disabled_optimization)
warning ("%s: %d basic blocks and %d edges/basic block", warning ("%s: %d basic blocks and %d edges/basic block",
pass, n_basic_blocks, n_edges / n_basic_blocks); pass, n_basic_blocks, n_edges / n_basic_blocks);
return true; return true;
} }
...@@ -7030,7 +7029,7 @@ reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn) ...@@ -7030,7 +7029,7 @@ reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
rtx insn; rtx insn;
int regno; int regno;
if (GET_CODE (reg) != REG) if (! REG_P (reg))
abort (); abort ();
regno = REGNO (reg); regno = REGNO (reg);
...@@ -7048,7 +7047,7 @@ reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn) ...@@ -7048,7 +7047,7 @@ reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
if (INSN_P (insn)) if (INSN_P (insn))
{ {
if (FIND_REG_INC_NOTE (insn, reg) if (FIND_REG_INC_NOTE (insn, reg)
|| (GET_CODE (insn) == CALL_INSN || (CALL_P (insn)
&& call_used_regs[regno]) && call_used_regs[regno])
|| find_reg_fusage (insn, CLOBBER, reg)) || find_reg_fusage (insn, CLOBBER, reg))
return insn; return insn;
...@@ -7069,7 +7068,7 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn) ...@@ -7069,7 +7068,7 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
rtx insn; rtx insn;
int regno; int regno;
if (GET_CODE (reg) != REG) if (! REG_P (reg))
return to_insn; return to_insn;
regno = REGNO (reg); regno = REGNO (reg);
...@@ -7084,7 +7083,7 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn) ...@@ -7084,7 +7083,7 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
insn = NEXT_INSN (insn)) insn = NEXT_INSN (insn))
if (INSN_P (insn) if (INSN_P (insn)
&& (reg_overlap_mentioned_p (reg, PATTERN (insn)) && (reg_overlap_mentioned_p (reg, PATTERN (insn))
|| (GET_CODE (insn) == CALL_INSN || (CALL_P (insn)
&& call_used_regs[regno]) && call_used_regs[regno])
|| find_reg_fusage (insn, USE, reg) || find_reg_fusage (insn, USE, reg)
|| find_reg_fusage (insn, CLOBBER, reg))) || find_reg_fusage (insn, CLOBBER, reg)))
...@@ -7097,9 +7096,9 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn) ...@@ -7097,9 +7096,9 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
static rtx static rtx
get_avail_load_store_reg (rtx insn) get_avail_load_store_reg (rtx insn)
{ {
if (GET_CODE (SET_DEST (PATTERN (insn))) == REG) /* A load. */ if (REG_P (SET_DEST (PATTERN (insn)))) /* A load. */
return SET_DEST(PATTERN(insn)); return SET_DEST(PATTERN(insn));
if (GET_CODE (SET_SRC (PATTERN (insn))) == REG) /* A store. */ if (REG_P (SET_SRC (PATTERN (insn)))) /* A store. */
return SET_SRC (PATTERN (insn)); return SET_SRC (PATTERN (insn));
abort (); abort ();
} }
...@@ -7111,9 +7110,7 @@ is_jump_table_basic_block (basic_block bb) ...@@ -7111,9 +7110,7 @@ is_jump_table_basic_block (basic_block bb)
{ {
rtx insn = BB_END (bb); rtx insn = BB_END (bb);
if (GET_CODE (insn) == JUMP_INSN && if (JUMP_TABLE_DATA_P (insn))
(GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
return true; return true;
return false; return false;
} }
...@@ -7324,7 +7321,7 @@ eliminate_partially_redundant_loads (basic_block bb, rtx insn, ...@@ -7324,7 +7321,7 @@ eliminate_partially_redundant_loads (basic_block bb, rtx insn,
delete_insn (insn); delete_insn (insn);
else else
a_occr->deleted_p = 1; a_occr->deleted_p = 1;
cleanup: cleanup:
while (unavail_occrs) while (unavail_occrs)
...@@ -7380,8 +7377,8 @@ gcse_after_reload (void) ...@@ -7380,8 +7377,8 @@ gcse_after_reload (void)
/* Is it a load - of the form (set (reg) (mem))? */ /* Is it a load - of the form (set (reg) (mem))? */
if (GET_CODE (insn) == INSN if (GET_CODE (insn) == INSN
&& GET_CODE (PATTERN (insn)) == SET && GET_CODE (PATTERN (insn)) == SET
&& GET_CODE (SET_DEST (PATTERN (insn))) == REG && REG_P (SET_DEST (PATTERN (insn)))
&& GET_CODE (SET_SRC (PATTERN (insn))) == MEM) && MEM_P (SET_SRC (PATTERN (insn))))
{ {
rtx pat = PATTERN (insn); rtx pat = PATTERN (insn);
rtx src = SET_SRC (pat); rtx src = SET_SRC (pat);
...@@ -7435,10 +7432,10 @@ hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table) ...@@ -7435,10 +7432,10 @@ hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table)
rtx src = SET_SRC (pat); rtx src = SET_SRC (pat);
rtx dest = SET_DEST (pat); rtx dest = SET_DEST (pat);
if (GET_CODE (src) != MEM && GET_CODE (dest) != MEM) if (! MEM_P (src) && ! MEM_P (dest))
return; return;
if (GET_CODE (dest) == REG) if (REG_P (dest))
{ {
if (/* Don't GCSE something if we can't do a reg/reg copy. */ if (/* Don't GCSE something if we can't do a reg/reg copy. */
can_copy_p (GET_MODE (dest)) can_copy_p (GET_MODE (dest))
...@@ -7458,7 +7455,7 @@ hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table) ...@@ -7458,7 +7455,7 @@ hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table)
insert_expr_in_table (src, GET_MODE (dest), insn, 0, 1, table); insert_expr_in_table (src, GET_MODE (dest), insn, 0, 1, table);
} }
} }
else if ((GET_CODE (src) == REG)) else if (REG_P (src))
{ {
/* Only record sets of pseudo-regs in the hash table. */ /* Only record sets of pseudo-regs in the hash table. */
if (/* Don't GCSE something if we can't do a reg/reg copy. */ if (/* Don't GCSE something if we can't do a reg/reg copy. */
...@@ -7528,7 +7525,7 @@ compute_hash_table_after_reload (struct hash_table *table) ...@@ -7528,7 +7525,7 @@ compute_hash_table_after_reload (struct hash_table *table)
if (! INSN_P (insn)) if (! INSN_P (insn))
continue; continue;
if (GET_CODE (insn) == CALL_INSN) if (CALL_P (insn))
{ {
bool clobbers_all = false; bool clobbers_all = false;
...@@ -7555,12 +7552,12 @@ compute_hash_table_after_reload (struct hash_table *table) ...@@ -7555,12 +7552,12 @@ compute_hash_table_after_reload (struct hash_table *table)
src = SET_SRC (PATTERN (insn)); src = SET_SRC (PATTERN (insn));
dest = SET_DEST (PATTERN (insn)); dest = SET_DEST (PATTERN (insn));
if (GET_CODE (src) == MEM && auto_inc_p (XEXP (src, 0))) if (MEM_P (src) && auto_inc_p (XEXP (src, 0)))
{ {
regno = REGNO (XEXP (XEXP (src, 0), 0)); regno = REGNO (XEXP (XEXP (src, 0), 0));
record_last_reg_set_info (insn, regno); record_last_reg_set_info (insn, regno);
} }
if (GET_CODE (dest) == MEM && auto_inc_p (XEXP (dest, 0))) if (MEM_P (dest) && auto_inc_p (XEXP (dest, 0)))
{ {
regno = REGNO (XEXP (XEXP (dest, 0), 0)); regno = REGNO (XEXP (XEXP (dest, 0), 0));
record_last_reg_set_info (insn, regno); record_last_reg_set_info (insn, regno);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment