Commit 55d796da by Kaveh R. Ghazi, committed by Kaveh Ghazi

recog.c (validate_change_1, [...]): Avoid C++ keywords.

	* recog.c (validate_change_1, validate_change,
	validate_unshare_change, validate_replace_rtx_1, struct
	funny_match, constrain_operands, peephole2_optimize): Avoid C++
	keywords.
	* reload.c (push_secondary_reload, secondary_reload_class,
	scratch_reload_class, find_valid_class, find_reusable_reload,
	push_reload, find_dummy_reload, find_reloads_address_1,
	find_reloads_address_part, find_equiv_reg): Likewise.
	* reload1.c (spill_failure, eliminate_regs_1, allocate_reload_reg,
	choose_reload_regs): Likewise.
	* rtlanal.c (replace_rtx, nonzero_bits1, num_sign_bit_copies1):
	Likewise.
	* rtlhooks.c (gen_lowpart_if_possible): Likewise.
	* sched-ebb.c (add_deps_for_risky_insns): Likewise.
	* sched-rgn.c (concat_INSN_LIST): Likewise.
	* stor-layout.c (mode_for_size, mode_for_size_tree,
	smallest_mode_for_size): Likewise.

From-SVN: r137894
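
The renames follow one mechanical pattern: local variables, parameters, and struct members whose names are reserved words in C++ are renamed so that the same C sources can also be compiled with a C++ compiler. A minimal sketch of the kind of declarations involved (illustrative only, not the exact GCC code):

    /* Each declaration below is valid C, but 'new', 'class', 'this' and
       'try' are keywords in C++, so a C++ compiler rejects them.  */
    rtx new;                                  /* renamed to new_rtx   */
    enum reg_class class;                     /* renamed to rclass    */
    struct funny_match { int this, other; };  /* 'this' -> 'this_op'  */
    rtx try, before_try, x;                   /* 'try' -> 'attempt'   */
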
gcc/recog.c:

@@ -183,7 +183,7 @@ static int changes_allocated;
 static int num_changes = 0;
 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
-at which NEW will be placed. If OBJECT is zero, no validation is done,
+at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
 the change is simply made.
 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
@@ -201,16 +201,16 @@ static int num_changes = 0;
 Otherwise, perform the change and return 1. */
 static bool
-validate_change_1 (rtx object, rtx *loc, rtx new, bool in_group, bool unshare)
+validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
 {
 rtx old = *loc;
-if (old == new || rtx_equal_p (old, new))
+if (old == new_rtx || rtx_equal_p (old, new_rtx))
 return 1;
 gcc_assert (in_group != 0 || num_changes == 0);
-*loc = new;
+*loc = new_rtx;
 /* Save the information describing this change. */
 if (num_changes >= changes_allocated)
@@ -253,18 +253,18 @@ validate_change_1 (rtx object, rtx *loc, rtx new, bool in_group, bool unshare)
 UNSHARE to false. */
 bool
-validate_change (rtx object, rtx *loc, rtx new, bool in_group)
+validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
 {
-return validate_change_1 (object, loc, new, in_group, false);
+return validate_change_1 (object, loc, new_rtx, in_group, false);
 }
 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
 UNSHARE to true. */
 bool
-validate_unshare_change (rtx object, rtx *loc, rtx new, bool in_group)
+validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
 {
-return validate_change_1 (object, loc, new, in_group, true);
+return validate_change_1 (object, loc, new_rtx, in_group, true);
 }
@@ -525,7 +525,7 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
 enum rtx_code code;
 enum machine_mode op0_mode = VOIDmode;
 int prev_changes = num_changes;
-rtx new;
+rtx new_rtx;
 if (!x)
 return;
@@ -633,25 +633,25 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
 case SIGN_EXTEND:
 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
 {
-new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
+new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
 op0_mode);
 /* If any of the above failed, substitute in something that
 we know won't be recognized. */
-if (!new)
+if (!new_rtx)
-new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
+new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
-validate_change (object, loc, new, 1);
+validate_change (object, loc, new_rtx, 1);
 }
 break;
 case SUBREG:
 /* All subregs possible to simplify should be simplified. */
-new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
+new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
 SUBREG_BYTE (x));
 /* Subregs of VOIDmode operands are incorrect. */
-if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
+if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
-new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
+new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
-if (new)
+if (new_rtx)
-validate_change (object, loc, new, 1);
+validate_change (object, loc, new_rtx, 1);
 break;
 case ZERO_EXTRACT:
 case SIGN_EXTRACT:
@@ -2200,7 +2200,7 @@ preprocess_constraints (void)
 struct funny_match
 {
-int this, other;
+int this_op, other;
 };
 int
@@ -2350,7 +2350,7 @@ constrain_operands (int strict)
 output op is the one that will be printed. */
 if (val == 2 && strict > 0)
 {
-funny_match[funny_match_index].this = opno;
+funny_match[funny_match_index].this_op = opno;
 funny_match[funny_match_index++].other = match;
 }
 }
@@ -2583,7 +2583,7 @@ constrain_operands (int strict)
 while (--funny_match_index >= 0)
 {
 recog_data.operand[funny_match[funny_match_index].other]
-= recog_data.operand[funny_match[funny_match_index].this];
+= recog_data.operand[funny_match[funny_match_index].this_op];
 }
 return 1;
@@ -2987,7 +2987,7 @@ peephole2_optimize (void)
 prev = PREV_INSN (insn);
 if (INSN_P (insn))
 {
-rtx try, before_try, x;
+rtx attempt, before_try, x;
 int match_len;
 rtx note;
 bool was_call = false;
@@ -3008,13 +3008,13 @@ peephole2_optimize (void)
 substitution would lose the
 REG_FRAME_RELATED_EXPR that is attached. */
 peep2_current_count = 0;
-try = NULL;
+attempt = NULL;
 }
 else
 /* Match the peephole. */
-try = peephole2_insns (PATTERN (insn), insn, &match_len);
+attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
-if (try != NULL)
+if (attempt != NULL)
 {
 /* If we are splitting a CALL_INSN, look for the CALL_INSN
 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
@@ -3032,7 +3032,7 @@ peephole2_optimize (void)
 continue;
 was_call = true;
-new_insn = try;
+new_insn = attempt;
 while (new_insn != NULL_RTX)
 {
 if (CALL_P (new_insn))
@@ -3080,7 +3080,7 @@ peephole2_optimize (void)
 REG_EH_REGION, NULL_RTX);
 /* Replace the old sequence with the new. */
-try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
+attempt = emit_insn_after_setloc (attempt, peep2_insn_data[i].insn,
 INSN_LOCATOR (peep2_insn_data[i].insn));
 before_try = PREV_INSN (insn);
 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
@@ -3095,7 +3095,7 @@ peephole2_optimize (void)
 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
 break;
-for (x = try ; x != before_try ; x = PREV_INSN (x))
+for (x = attempt ; x != before_try ; x = PREV_INSN (x))
 if (CALL_P (x)
 || (flag_non_call_exceptions
 && may_trap_p (PATTERN (x))
@@ -3145,7 +3145,7 @@ peephole2_optimize (void)
 bitmap_copy (live, peep2_insn_data[i].live_before);
 /* Update life information for the new sequence. */
-x = try;
+x = attempt;
 do
 {
 if (INSN_P (x))
@@ -3169,7 +3169,7 @@ peephole2_optimize (void)
 /* If we generated a jump instruction, it won't have
 JUMP_LABEL set. Recompute after we're done. */
-for (x = try; x != before_try; x = PREV_INSN (x))
+for (x = attempt; x != before_try; x = PREV_INSN (x))
 if (JUMP_P (x))
 {
 do_rebuild_jump_labels = true;
gcc/reload.c:

@@ -319,7 +319,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 enum machine_mode reload_mode, enum reload_type type,
 enum insn_code *picode, secondary_reload_info *prev_sri)
 {
-enum reg_class class = NO_REGS;
+enum reg_class rclass = NO_REGS;
 enum reg_class scratch_class;
 enum machine_mode mode = reload_mode;
 enum insn_code icode = CODE_FOR_nothing;
@@ -362,15 +362,15 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 sri.icode = CODE_FOR_nothing;
 sri.prev_sri = prev_sri;
-class = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
+rclass = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
 icode = sri.icode;
 /* If we don't need any secondary registers, done. */
-if (class == NO_REGS && icode == CODE_FOR_nothing)
+if (rclass == NO_REGS && icode == CODE_FOR_nothing)
 return -1;
-if (class != NO_REGS)
+if (rclass != NO_REGS)
-t_reload = push_secondary_reload (in_p, x, opnum, optional, class,
+t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
 reload_mode, type, &t_icode, &sri);
 /* If we will be using an insn, the secondary reload is for a
@@ -392,7 +392,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 an icode to reload from an intermediate tertiary reload register.
 We should probably have a new field in struct reload to tag a
 chain of scratch operand reloads onto. */
-gcc_assert (class == NO_REGS);
+gcc_assert (rclass == NO_REGS);
 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
 gcc_assert (*scratch_constraint == '=');
@@ -404,7 +404,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
 scratch_constraint));
-class = scratch_class;
+rclass = scratch_class;
 mode = insn_data[(int) icode].operand[2].mode;
 }
@@ -422,21 +422,21 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 Allow this when a reload_in/out pattern is being used. I.e. assume
 that the generated code handles this case. */
-gcc_assert (!in_p || class != reload_class || icode != CODE_FOR_nothing
+gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
 || t_icode != CODE_FOR_nothing);
 /* See if we can reuse an existing secondary reload. */
 for (s_reload = 0; s_reload < n_reloads; s_reload++)
 if (rld[s_reload].secondary_p
-&& (reg_class_subset_p (class, rld[s_reload].class)
+&& (reg_class_subset_p (rclass, rld[s_reload].class)
-|| reg_class_subset_p (rld[s_reload].class, class))
+|| reg_class_subset_p (rld[s_reload].class, rclass))
 && ((in_p && rld[s_reload].inmode == mode)
 || (! in_p && rld[s_reload].outmode == mode))
 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
-&& (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
+&& (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
 opnum, rld[s_reload].opnum))
 {
@@ -445,8 +445,8 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 if (! in_p)
 rld[s_reload].outmode = mode;
-if (reg_class_subset_p (class, rld[s_reload].class))
+if (reg_class_subset_p (rclass, rld[s_reload].class))
-rld[s_reload].class = class;
+rld[s_reload].class = rclass;
 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
 rld[s_reload].optional &= optional;
@@ -467,7 +467,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 way reloads are output. */
 if (in_p && icode == CODE_FOR_nothing
-&& SECONDARY_MEMORY_NEEDED (class, reload_class, mode))
+&& SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
 {
 get_secondary_mem (x, reload_mode, opnum, type);
@@ -479,7 +479,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 /* We need to make a new secondary reload for this register class. */
 rld[s_reload].in = rld[s_reload].out = 0;
-rld[s_reload].class = class;
+rld[s_reload].class = rclass;
 rld[s_reload].inmode = in_p ? mode : VOIDmode;
 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
@@ -503,7 +503,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 #ifdef SECONDARY_MEMORY_NEEDED
 if (! in_p && icode == CODE_FOR_nothing
-&& SECONDARY_MEMORY_NEEDED (reload_class, class, mode))
+&& SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
 get_secondary_mem (x, mode, opnum, type);
 #endif
 }
@@ -516,7 +516,7 @@ push_secondary_reload (int in_p, rtx x, int opnum, int optional,
 register and a scratch register is needed, we return the class of the
 intermediate register. */
 enum reg_class
-secondary_reload_class (bool in_p, enum reg_class class,
+secondary_reload_class (bool in_p, enum reg_class rclass,
 enum machine_mode mode, rtx x)
 {
 enum insn_code icode;
@@ -524,13 +524,13 @@ secondary_reload_class (bool in_p, enum reg_class class,
 sri.icode = CODE_FOR_nothing;
 sri.prev_sri = NULL;
-class = targetm.secondary_reload (in_p, x, class, mode, &sri);
+rclass = targetm.secondary_reload (in_p, x, rclass, mode, &sri);
 icode = sri.icode;
 /* If there are no secondary reloads at all, we return NO_REGS.
 If an intermediate register is needed, we return its class. */
-if (icode == CODE_FOR_nothing || class != NO_REGS)
+if (icode == CODE_FOR_nothing || rclass != NO_REGS)
-return class;
+return rclass;
 /* No intermediate register is needed, but we have a special reload
 pattern, which we assume for now needs a scratch register. */
@@ -547,7 +547,7 @@ scratch_reload_class (enum insn_code icode)
 {
 const char *scratch_constraint;
 char scratch_letter;
-enum reg_class class;
+enum reg_class rclass;
 gcc_assert (insn_data[(int) icode].n_operands == 3);
 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
@@ -558,10 +558,10 @@ scratch_reload_class (enum insn_code icode)
 scratch_letter = *scratch_constraint;
 if (scratch_letter == 'r')
 return GENERAL_REGS;
-class = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
+rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
 scratch_constraint);
-gcc_assert (class != NO_REGS);
+gcc_assert (rclass != NO_REGS);
-return class;
+return rclass;
 }
 #ifdef SECONDARY_MEMORY_NEEDED
@@ -660,24 +660,24 @@ find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
 unsigned int dest_regno ATTRIBUTE_UNUSED)
 {
 int best_cost = -1;
-int class;
+int rclass;
 int regno;
 enum reg_class best_class = NO_REGS;
 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
 unsigned int best_size = 0;
 int cost;
-for (class = 1; class < N_REG_CLASSES; class++)
+for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
 {
 int bad = 0;
 int good = 0;
 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
-if (TEST_HARD_REG_BIT (reg_class_contents[class], regno))
+if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
 {
 if (HARD_REGNO_MODE_OK (regno, inner))
 {
 good = 1;
-if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno + n)
+if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
 || ! HARD_REGNO_MODE_OK (regno + n, outer))
 bad = 1;
 }
@@ -685,15 +685,15 @@ find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
 if (bad || !good)
 continue;
-cost = REGISTER_MOVE_COST (outer, class, dest_class);
+cost = REGISTER_MOVE_COST (outer, rclass, dest_class);
-if ((reg_class_size[class] > best_size
+if ((reg_class_size[rclass] > best_size
 && (best_cost < 0 || best_cost >= cost))
 || best_cost > cost)
 {
-best_class = class;
+best_class = rclass;
-best_size = reg_class_size[class];
+best_size = reg_class_size[rclass];
-best_cost = REGISTER_MOVE_COST (outer, class, dest_class);
+best_cost = REGISTER_MOVE_COST (outer, rclass, dest_class);
 }
 }
@@ -704,14 +704,14 @@ find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
 /* Return the number of a previously made reload that can be combined with
 a new one, or n_reloads if none of the existing reloads can be used.
-OUT, CLASS, TYPE and OPNUM are the same arguments as passed to
+OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
 push_reload, they determine the kind of the new reload that we try to
 combine. P_IN points to the corresponding value of IN, which can be
 modified by this function.
 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
 static int
-find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
+find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
 enum reload_type type, int opnum, int dont_share)
 {
 rtx in = *p_in;
@@ -732,18 +732,18 @@ find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
 than we otherwise would. */
 for (i = 0; i < n_reloads; i++)
-if ((reg_class_subset_p (class, rld[i].class)
+if ((reg_class_subset_p (rclass, rld[i].class)
-|| reg_class_subset_p (rld[i].class, class))
+|| reg_class_subset_p (rld[i].class, rclass))
 /* If the existing reload has a register, it must fit our class. */
 && (rld[i].reg_rtx == 0
-|| TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+|| TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
 true_regnum (rld[i].reg_rtx)))
 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
 || (out != 0 && MATCHES (rld[i].out, out)
 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
-&& (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
+&& (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
 return i;
@@ -753,12 +753,12 @@ find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
 the preincrementation as happening before any ref in this insn
 to that register. */
 for (i = 0; i < n_reloads; i++)
-if ((reg_class_subset_p (class, rld[i].class)
+if ((reg_class_subset_p (rclass, rld[i].class)
-|| reg_class_subset_p (rld[i].class, class))
+|| reg_class_subset_p (rld[i].class, rclass))
 /* If the existing reload has a register, it must fit our
 class. */
 && (rld[i].reg_rtx == 0
-|| TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+|| TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
 true_regnum (rld[i].reg_rtx)))
 && out == 0 && rld[i].out == 0 && rld[i].in != 0
 && ((REG_P (in)
@@ -768,7 +768,7 @@ find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
 && MATCHES (XEXP (in, 0), rld[i].in)))
 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
-&& (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
+&& (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
 && MERGABLE_RELOADS (type, rld[i].when_needed,
 opnum, rld[i].opnum))
 {
@@ -878,7 +878,7 @@ can_reload_into (rtx in, int regno, enum machine_mode mode)
 If IN and OUT are both nonzero, it means the same register must be used
 to reload both IN and OUT.
-CLASS is a register class required for the reloaded data.
+RCLASS is a register class required for the reloaded data.
 INMODE is the machine mode that the instruction requires
 for the reg that replaces IN and OUTMODE is likewise for OUT.
@@ -904,7 +904,7 @@ can_reload_into (rtx in, int regno, enum machine_mode mode)
 int
 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
-enum reg_class class, enum machine_mode inmode,
+enum reg_class rclass, enum machine_mode inmode,
 enum machine_mode outmode, int strict_low, int optional,
 int opnum, enum reload_type type)
 {
@@ -1003,7 +1003,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 if (in != 0 && GET_CODE (in) == SUBREG
 && (subreg_lowpart_p (in) || strict_low)
 #ifdef CANNOT_CHANGE_MODE_CLASS
-&& !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, class)
+&& !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
 #endif
 && (CONSTANT_P (SUBREG_REG (in))
 || GET_CODE (SUBREG_REG (in)) == PLUS
@@ -1043,8 +1043,8 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
 [GET_MODE (SUBREG_REG (in))]))
 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
-|| (secondary_reload_class (1, class, inmode, in) != NO_REGS
+|| (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
-&& (secondary_reload_class (1, class, GET_MODE (SUBREG_REG (in)),
+&& (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
 SUBREG_REG (in))
 == NO_REGS))
 #ifdef CANNOT_CHANGE_MODE_CLASS
@@ -1079,7 +1079,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
 {
-enum reg_class in_class = class;
+enum reg_class in_class = rclass;
 if (REG_P (SUBREG_REG (in)))
 in_class
@@ -1109,7 +1109,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 if (out != 0 && GET_CODE (out) == SUBREG
 && (subreg_lowpart_p (out) || strict_low)
 #ifdef CANNOT_CHANGE_MODE_CLASS
-&& !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, class)
+&& !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
 #endif
 && (CONSTANT_P (SUBREG_REG (out))
 || strict_low
@@ -1136,8 +1136,8 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
 [GET_MODE (SUBREG_REG (out))]))
 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
-|| (secondary_reload_class (0, class, outmode, out) != NO_REGS
+|| (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
-&& (secondary_reload_class (0, class, GET_MODE (SUBREG_REG (out)),
+&& (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
 SUBREG_REG (out))
 == NO_REGS))
 #ifdef CANNOT_CHANGE_MODE_CLASS
@@ -1211,10 +1211,10 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 /* Narrow down the class of register wanted if that is
 desirable on this machine for efficiency. */
 {
-enum reg_class preferred_class = class;
+enum reg_class preferred_class = rclass;
 if (in != 0)
-preferred_class = PREFERRED_RELOAD_CLASS (in, class);
+preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
 /* Output reloads may need analogous treatment, different in detail. */
 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
@@ -1225,7 +1225,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 /* Discard what the target said if we cannot do it. */
 if (preferred_class != NO_REGS
 || (optional && type == RELOAD_FOR_OUTPUT))
-class = preferred_class;
+rclass = preferred_class;
 }
 /* Make sure we use a class that can handle the actual pseudo
@@ -1234,14 +1234,14 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 can handle SImode, QImode needs a smaller class. */
 #ifdef LIMIT_RELOAD_CLASS
 if (in_subreg_loc)
-class = LIMIT_RELOAD_CLASS (inmode, class);
+rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
 else if (in != 0 && GET_CODE (in) == SUBREG)
-class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), class);
+rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
 if (out_subreg_loc)
-class = LIMIT_RELOAD_CLASS (outmode, class);
+rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
 if (out != 0 && GET_CODE (out) == SUBREG)
-class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), class);
+rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
 #endif
 /* Verify that this class is at least possible for the mode that
@@ -1265,7 +1265,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 }
 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
 if (HARD_REGNO_MODE_OK (i, mode)
-&& in_hard_reg_set_p (reg_class_contents[(int) class], mode, i))
+&& in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
 break;
 if (i == FIRST_PSEUDO_REGISTER)
 {
@@ -1290,10 +1290,10 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 /* Optional output reloads are always OK even if we have no register class,
 since the function of these reloads is only to have spill_reg_store etc.
 set, so that the storing insn can be deleted later. */
-gcc_assert (class != NO_REGS
+gcc_assert (rclass != NO_REGS
 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
-i = find_reusable_reload (&in, out, class, type, opnum, dont_share);
+i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
 if (i == n_reloads)
 {
@@ -1303,11 +1303,11 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 if (in != 0)
 secondary_in_reload
-= push_secondary_reload (1, in, opnum, optional, class, inmode, type,
+= push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
 &secondary_in_icode, NULL);
 if (out != 0 && GET_CODE (out) != SCRATCH)
 secondary_out_reload
-= push_secondary_reload (0, out, opnum, optional, class, outmode,
+= push_secondary_reload (0, out, opnum, optional, rclass, outmode,
 type, &secondary_out_icode, NULL);
 /* We found no existing reload suitable for re-use.
@@ -1320,14 +1320,14 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
-class, inmode))
+rclass, inmode))
 get_secondary_mem (in, inmode, opnum, type);
 #endif
 i = n_reloads;
 rld[i].in = in;
 rld[i].out = out;
-rld[i].class = class;
+rld[i].class = rclass;
 rld[i].inmode = inmode;
 rld[i].outmode = outmode;
 rld[i].reg_rtx = 0;
@@ -1351,7 +1351,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 && (REG_P (out)
 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
-&& SECONDARY_MEMORY_NEEDED (class,
+&& SECONDARY_MEMORY_NEEDED (rclass,
 REGNO_REG_CLASS (reg_or_subregno (out)),
 outmode))
 get_secondary_mem (out, outmode, opnum, type);
@@ -1411,8 +1411,8 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 rld[i].out = out;
 rld[i].out_reg = outloc ? *outloc : 0;
 }
-if (reg_class_subset_p (class, rld[i].class))
+if (reg_class_subset_p (rclass, rld[i].class))
-rld[i].class = class;
+rld[i].class = rclass;
 rld[i].optional &= optional;
 if (MERGE_TO_OTHER (type, rld[i].when_needed,
 opnum, rld[i].opnum))
@@ -1561,7 +1561,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 for (offs = 0; offs < nregs; offs++)
 if (fixed_regs[regno + offs]
-|| ! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+|| ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
 regno + offs))
 break;
@@ -1867,7 +1867,7 @@ combine_reloads (void)
 If so, return the register rtx that proves acceptable.
 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
-CLASS is the register class required for the reload.
+RCLASS is the register class required for the reload.
 If FOR_REAL is >= 0, it is the number of the reload,
 and in some cases when it can be discovered that OUT doesn't need
@@ -1884,7 +1884,7 @@ combine_reloads (void)
 static rtx
 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
 enum machine_mode inmode, enum machine_mode outmode,
-enum reg_class class, int for_real, int earlyclobber)
+enum reg_class rclass, int for_real, int earlyclobber)
 {
 rtx in = real_in;
 rtx out = real_out;
@@ -1927,9 +1927,9 @@ find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
 /* Narrow down the reg class, the same way push_reload will;
 otherwise we might find a dummy now, but push_reload won't. */
 {
-enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, class);
+enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
 if (preferred_class != NO_REGS)
-class = preferred_class;
+rclass = preferred_class;
 }
 /* See if OUT will do. */
@@ -1960,7 +1960,7 @@ find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
 unsigned int i;
 for (i = 0; i < nwords; i++)
-if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
 regno + i))
 break;
@@ -2028,7 +2028,7 @@ find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
 unsigned int i;
 for (i = 0; i < nwords; i++)
-if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
+if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
 regno + i))
 break;
@@ -5916,14 +5916,14 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
 is larger than the class size, then reload the whole SUBREG. */
 else
 {
-enum reg_class class = context_reg_class;
+enum reg_class rclass = context_reg_class;
-if ((unsigned) CLASS_MAX_NREGS (class, GET_MODE (SUBREG_REG (x)))
+if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
-> reg_class_size[class])
+> reg_class_size[rclass])
 {
 x = find_reloads_subreg_address (x, 0, opnum,
 ADDR_TYPE (type),
 ind_levels, insn);
-push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
+push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
 return 1;
 }
@@ -5954,7 +5954,7 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
 }
 /* X, which is found at *LOC, is a part of an address that needs to be
-reloaded into a register of class CLASS. If X is a constant, or if
+reloaded into a register of class RCLASS. If X is a constant, or if
 X is a PLUS that contains a constant, check that the constant is a
 legitimate operand and that we are supposed to be able to load
 it into the register.
@@ -5969,13 +5969,13 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
 supports. */
 static void
-find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
+find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
 enum machine_mode mode, int opnum,
 enum reload_type type, int ind_levels)
 {
 if (CONSTANT_P (x)
 && (! LEGITIMATE_CONSTANT_P (x)
-|| PREFERRED_RELOAD_CLASS (x, class) == NO_REGS))
+|| PREFERRED_RELOAD_CLASS (x, rclass) == NO_REGS))
 {
 x = force_const_mem (mode, x);
 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
@@ -5985,7 +5985,7 @@ find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
 else if (GET_CODE (x) == PLUS
 && CONSTANT_P (XEXP (x, 1))
 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
-|| PREFERRED_RELOAD_CLASS (XEXP (x, 1), class) == NO_REGS))
+|| PREFERRED_RELOAD_CLASS (XEXP (x, 1), rclass) == NO_REGS))
 {
 rtx tem;
@@ -5995,7 +5995,7 @@ find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
 opnum, type, ind_levels, 0);
 }
-push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
+push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
 mode, VOIDmode, 0, 0, opnum, type);
 }
@@ -6600,7 +6600,7 @@ refers_to_mem_for_reload_p (rtx x)
 /* Check the insns before INSN to see if there is a suitable register
 containing the same value as GOAL.
-If OTHER is -1, look for a register in class CLASS.
+If OTHER is -1, look for a register in class RCLASS.
 Otherwise, just see if register number OTHER shares GOAL's value.
 Return an rtx for the register found, or zero if none is found.
@@ -6626,7 +6626,7 @@ refers_to_mem_for_reload_p (rtx x)
 as if it were a constant except that sp is required to be unchanging. */
 rtx
-find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
+find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
 short *reload_reg_p, int goalreg, enum machine_mode mode)
 {
 rtx p = insn;
@@ -6772,7 +6772,7 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
 }
 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
 continue;
-else if (!in_hard_reg_set_p (reg_class_contents[(int) class],
+else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
 mode, valueno))
 continue;
 value = valtry;
...@@ -1976,16 +1976,16 @@ delete_caller_save_insns (void) ...@@ -1976,16 +1976,16 @@ delete_caller_save_insns (void)
INSN should be one of the insns which needed this particular spill reg. */ INSN should be one of the insns which needed this particular spill reg. */
static void static void
spill_failure (rtx insn, enum reg_class class) spill_failure (rtx insn, enum reg_class rclass)
{ {
if (asm_noperands (PATTERN (insn)) >= 0) if (asm_noperands (PATTERN (insn)) >= 0)
error_for_asm (insn, "can't find a register in class %qs while " error_for_asm (insn, "can't find a register in class %qs while "
"reloading %<asm%>", "reloading %<asm%>",
reg_class_names[class]); reg_class_names[rclass]);
else else
{ {
error ("unable to find a register to spill in class %qs", error ("unable to find a register to spill in class %qs",
reg_class_names[class]); reg_class_names[rclass]);
if (dump_file) if (dump_file)
{ {
...@@ -2394,7 +2394,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2394,7 +2394,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
enum rtx_code code = GET_CODE (x); enum rtx_code code = GET_CODE (x);
struct elim_table *ep; struct elim_table *ep;
int regno; int regno;
rtx new; rtx new_rtx;
int i, j; int i, j;
const char *fmt; const char *fmt;
int copied = 0; int copied = 0;
...@@ -2523,15 +2523,15 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2523,15 +2523,15 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
&& reg_equiv_constant[REGNO (new0)] != 0) && reg_equiv_constant[REGNO (new0)] != 0)
new0 = reg_equiv_constant[REGNO (new0)]; new0 = reg_equiv_constant[REGNO (new0)];
new = form_sum (new0, new1); new_rtx = form_sum (new0, new1);
/* As above, if we are not inside a MEM we do not want to /* As above, if we are not inside a MEM we do not want to
turn a PLUS into something else. We might try to do so here turn a PLUS into something else. We might try to do so here
for an addition of 0 if we aren't optimizing. */ for an addition of 0 if we aren't optimizing. */
if (! mem_mode && GET_CODE (new) != PLUS) if (! mem_mode && GET_CODE (new_rtx) != PLUS)
return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx); return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
else else
return new; return new_rtx;
} }
} }
return x; return x;
...@@ -2588,8 +2588,8 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2588,8 +2588,8 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
/* If we have something in XEXP (x, 0), the usual case, eliminate it. */ /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
if (XEXP (x, 0)) if (XEXP (x, 0))
{ {
new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true); new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
if (new != XEXP (x, 0)) if (new_rtx != XEXP (x, 0))
{ {
/* If this is a REG_DEAD note, it is not valid anymore. /* If this is a REG_DEAD note, it is not valid anymore.
Using the eliminated version could result in creating a Using the eliminated version could result in creating a
...@@ -2599,7 +2599,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2599,7 +2599,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true) ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
: NULL_RTX); : NULL_RTX);
x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1)); x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
} }
} }
...@@ -2611,10 +2611,10 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2611,10 +2611,10 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
strictly needed, but it simplifies the code. */ strictly needed, but it simplifies the code. */
if (XEXP (x, 1)) if (XEXP (x, 1))
{ {
new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true); new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
if (new != XEXP (x, 1)) if (new_rtx != XEXP (x, 1))
return return
gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new); gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
} }
return x; return x;
...@@ -2636,13 +2636,13 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2636,13 +2636,13 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
if (GET_CODE (XEXP (x, 1)) == PLUS if (GET_CODE (XEXP (x, 1)) == PLUS
&& XEXP (XEXP (x, 1), 0) == XEXP (x, 0)) && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
{ {
rtx new = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode, rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
insn, true); insn, true);
if (new != XEXP (XEXP (x, 1), 1)) if (new_rtx != XEXP (XEXP (x, 1), 1))
return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0), return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
gen_rtx_PLUS (GET_MODE (x), gen_rtx_PLUS (GET_MODE (x),
XEXP (x, 0), new)); XEXP (x, 0), new_rtx));
} }
return x; return x;
...@@ -2660,9 +2660,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2660,9 +2660,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
case POPCOUNT: case POPCOUNT:
case PARITY: case PARITY:
case BSWAP: case BSWAP:
new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false); new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
if (new != XEXP (x, 0)) if (new_rtx != XEXP (x, 0))
return gen_rtx_fmt_e (code, GET_MODE (x), new); return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
return x; return x;
case SUBREG: case SUBREG:
...@@ -2678,17 +2678,17 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2678,17 +2678,17 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
&& reg_equiv_memory_loc != 0 && reg_equiv_memory_loc != 0
&& reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0) && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
{ {
new = SUBREG_REG (x); new_rtx = SUBREG_REG (x);
} }
else else
new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false); new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
if (new != SUBREG_REG (x)) if (new_rtx != SUBREG_REG (x))
{ {
int x_size = GET_MODE_SIZE (GET_MODE (x)); int x_size = GET_MODE_SIZE (GET_MODE (x));
int new_size = GET_MODE_SIZE (GET_MODE (new)); int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
if (MEM_P (new) if (MEM_P (new_rtx)
&& ((x_size < new_size && ((x_size < new_size
#ifdef WORD_REGISTER_OPERATIONS #ifdef WORD_REGISTER_OPERATIONS
/* On these machines, combine can create rtl of the form /* On these machines, combine can create rtl of the form
...@@ -2704,9 +2704,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2704,9 +2704,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
) )
|| x_size == new_size) || x_size == new_size)
) )
return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x)); return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
else else
return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x)); return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
} }
return x; return x;
...@@ -2722,9 +2722,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2722,9 +2722,9 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
case USE: case USE:
/* Handle insn_list USE that a call to a pure function may generate. */ /* Handle insn_list USE that a call to a pure function may generate. */
new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false); new_rtx = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
if (new != XEXP (x, 0)) if (new_rtx != XEXP (x, 0))
return gen_rtx_USE (GET_MODE (x), new); return gen_rtx_USE (GET_MODE (x), new_rtx);
return x; return x;
case CLOBBER: case CLOBBER:
...@@ -2743,21 +2743,21 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2743,21 +2743,21 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
{ {
if (*fmt == 'e') if (*fmt == 'e')
{ {
new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false); new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
if (new != XEXP (x, i) && ! copied) if (new_rtx != XEXP (x, i) && ! copied)
{ {
x = shallow_copy_rtx (x); x = shallow_copy_rtx (x);
copied = 1; copied = 1;
} }
XEXP (x, i) = new; XEXP (x, i) = new_rtx;
} }
else if (*fmt == 'E') else if (*fmt == 'E')
{ {
int copied_vec = 0; int copied_vec = 0;
for (j = 0; j < XVECLEN (x, i); j++) for (j = 0; j < XVECLEN (x, i); j++)
{ {
new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false); new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
if (new != XVECEXP (x, i, j) && ! copied_vec) if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
{ {
rtvec new_v = gen_rtvec_v (XVECLEN (x, i), rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
XVEC (x, i)->elem); XVEC (x, i)->elem);
...@@ -2769,7 +2769,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn, ...@@ -2769,7 +2769,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
XVEC (x, i) = new_v; XVEC (x, i) = new_v;
copied_vec = 1; copied_vec = 1;
} }
XVECEXP (x, i, j) = new; XVECEXP (x, i, j) = new_rtx;
} }
} }
} }
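Note on the eliminate_regs_1 hunks above: the only change is the spelling of the temporary, from new to new_rtx, because new is an operator keyword in C++ and the file would no longer parse when built by a C++ compiler. The copy-on-write walk over the 'e'/'E' operands is untouched. A minimal, stand-alone sketch of the same pattern follows; the struct node type and rewrite function are invented for illustration and are not GCC code:

    #include <stddef.h>

    struct node { int code; struct node *op0; };

    static struct node *
    rewrite (struct node *x)
    {
      struct node *new_node;   /* spelled "new" before the rename; C++ rejects that */

      if (x == NULL)
        return NULL;

      new_node = rewrite (x->op0);
      if (new_node != x->op0)
        x->op0 = new_node;     /* the real code shallow-copies x first; omitted here */
      return x;
    }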
...@@ -5474,7 +5474,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r, ...@@ -5474,7 +5474,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
for (count = 0; count < n_spills; count++) for (count = 0; count < n_spills; count++)
{ {
int class = (int) rld[r].class; int rclass = (int) rld[r].class;
int regnum; int regnum;
i++; i++;
...@@ -5491,7 +5491,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r, ...@@ -5491,7 +5491,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
&& free_for_value_p (regnum, rld[r].mode, rld[r].opnum, && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
rld[r].when_needed, rld[r].in, rld[r].when_needed, rld[r].in,
rld[r].out, r, 1))) rld[r].out, r, 1)))
&& TEST_HARD_REG_BIT (reg_class_contents[class], regnum) && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
&& HARD_REGNO_MODE_OK (regnum, rld[r].mode) && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
/* Look first for regs to share, then for unshared. But /* Look first for regs to share, then for unshared. But
don't share regs used for inherited reloads; they are don't share regs used for inherited reloads; they are
...@@ -5521,7 +5521,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r, ...@@ -5521,7 +5521,7 @@ allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
while (nr > 1) while (nr > 1)
{ {
int regno = regnum + nr - 1; int regno = regnum + nr - 1;
if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno) if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
&& spill_reg_order[regno] >= 0 && spill_reg_order[regno] >= 0
&& reload_reg_free_p (regno, rld[r].opnum, && reload_reg_free_p (regno, rld[r].opnum,
rld[r].when_needed))) rld[r].when_needed)))
...@@ -5793,7 +5793,7 @@ choose_reload_regs (struct insn_chain *chain) ...@@ -5793,7 +5793,7 @@ choose_reload_regs (struct insn_chain *chain)
#endif #endif
) )
{ {
enum reg_class class = rld[r].class, last_class; enum reg_class rclass = rld[r].class, last_class;
rtx last_reg = reg_last_reload_reg[regno]; rtx last_reg = reg_last_reload_reg[regno];
enum machine_mode need_mode; enum machine_mode need_mode;
...@@ -5814,18 +5814,18 @@ choose_reload_regs (struct insn_chain *chain) ...@@ -5814,18 +5814,18 @@ choose_reload_regs (struct insn_chain *chain)
&& reg_reloaded_contents[i] == regno && reg_reloaded_contents[i] == regno
&& TEST_HARD_REG_BIT (reg_reloaded_valid, i) && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
&& HARD_REGNO_MODE_OK (i, rld[r].mode) && HARD_REGNO_MODE_OK (i, rld[r].mode)
&& (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i) && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
/* Even if we can't use this register as a reload /* Even if we can't use this register as a reload
register, we might use it for reload_override_in, register, we might use it for reload_override_in,
if copying it to the desired class is cheap if copying it to the desired class is cheap
enough. */ enough. */
|| ((REGISTER_MOVE_COST (mode, last_class, class) || ((REGISTER_MOVE_COST (mode, last_class, rclass)
< MEMORY_MOVE_COST (mode, class, 1)) < MEMORY_MOVE_COST (mode, rclass, 1))
&& (secondary_reload_class (1, class, mode, && (secondary_reload_class (1, rclass, mode,
last_reg) last_reg)
== NO_REGS) == NO_REGS)
#ifdef SECONDARY_MEMORY_NEEDED #ifdef SECONDARY_MEMORY_NEEDED
&& ! SECONDARY_MEMORY_NEEDED (last_class, class, && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
mode) mode)
#endif #endif
)) ))
......
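In allocate_reload_reg and choose_reload_regs the offending identifier is class, also reserved in C++, renamed here to rclass wherever it holds a register class; the reuse heuristic itself (take a previously loaded hard register for reload_override_in only when copying it into the wanted class beats reloading from memory and no secondary reload is needed) is unchanged. A hedged, self-contained illustration of the rename, with an invented predicate and a trimmed enum:

    /* Illustrative only: "enum reg_class class = ...;" is valid C but a
       syntax error in C++, hence the rename to rclass.  */
    enum reg_class { NO_REGS, GENERAL_REGS, ALL_REGS };

    static int
    spans_general_regs (enum reg_class rclass)
    {
      return rclass == GENERAL_REGS || rclass == ALL_REGS;
    }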
...@@ -2470,32 +2470,32 @@ replace_rtx (rtx x, rtx from, rtx to) ...@@ -2470,32 +2470,32 @@ replace_rtx (rtx x, rtx from, rtx to)
if (GET_CODE (x) == SUBREG) if (GET_CODE (x) == SUBREG)
{ {
rtx new = replace_rtx (SUBREG_REG (x), from, to); rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
if (GET_CODE (new) == CONST_INT) if (GET_CODE (new_rtx) == CONST_INT)
{ {
x = simplify_subreg (GET_MODE (x), new, x = simplify_subreg (GET_MODE (x), new_rtx,
GET_MODE (SUBREG_REG (x)), GET_MODE (SUBREG_REG (x)),
SUBREG_BYTE (x)); SUBREG_BYTE (x));
gcc_assert (x); gcc_assert (x);
} }
else else
SUBREG_REG (x) = new; SUBREG_REG (x) = new_rtx;
return x; return x;
} }
else if (GET_CODE (x) == ZERO_EXTEND) else if (GET_CODE (x) == ZERO_EXTEND)
{ {
rtx new = replace_rtx (XEXP (x, 0), from, to); rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
if (GET_CODE (new) == CONST_INT) if (GET_CODE (new_rtx) == CONST_INT)
{ {
x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
new, GET_MODE (XEXP (x, 0))); new_rtx, GET_MODE (XEXP (x, 0)));
gcc_assert (x); gcc_assert (x);
} }
else else
XEXP (x, 0) = new; XEXP (x, 0) = new_rtx;
return x; return x;
} }
...@@ -3692,12 +3692,12 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -3692,12 +3692,12 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
{ {
unsigned HOST_WIDE_INT nonzero_for_hook = nonzero; unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x, rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
known_mode, known_ret, known_mode, known_ret,
&nonzero_for_hook); &nonzero_for_hook);
if (new) if (new_rtx)
nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x, nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
known_mode, known_ret); known_mode, known_ret);
return nonzero_for_hook; return nonzero_for_hook;
...@@ -4177,12 +4177,12 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, ...@@ -4177,12 +4177,12 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
{ {
unsigned int copies_for_hook = 1, copies = 1; unsigned int copies_for_hook = 1, copies = 1;
rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x, rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
known_mode, known_ret, known_mode, known_ret,
&copies_for_hook); &copies_for_hook);
if (new) if (new_rtx)
copies = cached_num_sign_bit_copies (new, mode, known_x, copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
known_mode, known_ret); known_mode, known_ret);
if (copies > 1 || copies_for_hook > 1) if (copies > 1 || copies_for_hook > 1)
......
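The rtlanal.c hunks follow the same rule: replace_rtx re-simplifies a SUBREG or ZERO_EXTEND whose operand folded to a CONST_INT, and nonzero_bits1 / num_sign_bit_copies1 consult a target hook that may hand back an alternative expression whose cached information can be reused; in all three the temporary holding the result becomes new_rtx. A hedged sketch of that hook-and-refine pattern, with invented types standing in for rtx and the rtl_hooks entry:

    #include <stdint.h>

    struct val { uint64_t cached_mask; };

    /* Hypothetical hook, standing in for rtl_hooks.reg_nonzero_bits.  */
    static struct val *(*nonzero_hook) (struct val *, uint64_t *);

    static uint64_t
    nonzero_mask (struct val *v)
    {
      uint64_t mask = ~(uint64_t) 0;
      struct val *new_val = NULL;   /* was "new" in the original */

      if (nonzero_hook)
        new_val = nonzero_hook (v, &mask);
      if (new_val)
        mask &= new_val->cached_mask;
      return mask;
    }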
...@@ -141,7 +141,7 @@ gen_lowpart_if_possible (enum machine_mode mode, rtx x) ...@@ -141,7 +141,7 @@ gen_lowpart_if_possible (enum machine_mode mode, rtx x)
{ {
/* This is the only other case we handle. */ /* This is the only other case we handle. */
int offset = 0; int offset = 0;
rtx new; rtx new_rtx;
if (WORDS_BIG_ENDIAN) if (WORDS_BIG_ENDIAN)
offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD) offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
...@@ -152,11 +152,11 @@ gen_lowpart_if_possible (enum machine_mode mode, rtx x) ...@@ -152,11 +152,11 @@ gen_lowpart_if_possible (enum machine_mode mode, rtx x)
offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)) offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
- MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x)))); - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
new = adjust_address_nv (x, mode, offset); new_rtx = adjust_address_nv (x, mode, offset);
if (! memory_address_p (mode, XEXP (new, 0))) if (! memory_address_p (mode, XEXP (new_rtx, 0)))
return 0; return 0;
return new; return new_rtx;
} }
else if (mode != GET_MODE (x) && GET_MODE (x) != VOIDmode else if (mode != GET_MODE (x) && GET_MODE (x) != VOIDmode
&& validate_subreg (mode, GET_MODE (x), x, && validate_subreg (mode, GET_MODE (x), x,
......
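gen_lowpart_if_possible keeps its logic; only the result temporary becomes new_rtx. For the WORDS_BIG_ENDIAN branch visible above, the offset picks out the low-order word: asking for the SImode lowpart of a DImode MEM with 4-byte words gives MAX(8,4) - MAX(4,4) = 4, i.e. the second word. A small stand-alone version of that arithmetic, with the sizes assumed for the example:

    #include <stdio.h>

    #define MAX(a, b) ((a) > (b) ? (a) : (b))

    int
    main (void)
    {
      const int units_per_word = 4;   /* assumed 32-bit words */
      const int outer_size = 8;       /* DImode, 8 bytes      */
      const int inner_size = 4;       /* SImode, 4 bytes      */
      int offset = MAX (outer_size, units_per_word)
                   - MAX (inner_size, units_per_word);

      printf ("word-big-endian lowpart offset: %d\n", offset);  /* prints 4 */
      return 0;
    }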
...@@ -357,7 +357,7 @@ static void ...@@ -357,7 +357,7 @@ static void
add_deps_for_risky_insns (rtx head, rtx tail) add_deps_for_risky_insns (rtx head, rtx tail)
{ {
rtx insn, prev; rtx insn, prev;
int class; int classification;
rtx last_jump = NULL_RTX; rtx last_jump = NULL_RTX;
rtx next_tail = NEXT_INSN (tail); rtx next_tail = NEXT_INSN (tail);
basic_block last_block = NULL, bb; basic_block last_block = NULL, bb;
...@@ -372,9 +372,9 @@ add_deps_for_risky_insns (rtx head, rtx tail) ...@@ -372,9 +372,9 @@ add_deps_for_risky_insns (rtx head, rtx tail)
} }
else if (INSN_P (insn) && last_jump != NULL_RTX) else if (INSN_P (insn) && last_jump != NULL_RTX)
{ {
class = haifa_classify_insn (insn); classification = haifa_classify_insn (insn);
prev = last_jump; prev = last_jump;
switch (class) switch (classification)
{ {
case PFREE_CANDIDATE: case PFREE_CANDIDATE:
if (flag_schedule_speculative_load) if (flag_schedule_speculative_load)
......
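In sched-ebb.c the local that receives haifa_classify_insn's result was called class; it becomes classification, and the switch over it is otherwise untouched. A toy version of that shape, with an invented enum and predicate (the real candidate classes and the flag_schedule_speculative_load policy are not reproduced here):

    enum insn_risk { RISK_FREE, RISK_PFREE_CANDIDATE, RISK_PRISKY_CANDIDATE };

    static int
    needs_control_dependence (enum insn_risk classification)  /* was "class" */
    {
      switch (classification)
        {
        case RISK_PFREE_CANDIDATE:
        case RISK_PRISKY_CANDIDATE:
          return 1;
        default:
          return 0;
        }
    }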
...@@ -2388,10 +2388,10 @@ static struct deps *bb_deps; ...@@ -2388,10 +2388,10 @@ static struct deps *bb_deps;
static rtx static rtx
concat_INSN_LIST (rtx copy, rtx old) concat_INSN_LIST (rtx copy, rtx old)
{ {
rtx new = old; rtx new_rtx = old;
for (; copy ; copy = XEXP (copy, 1)) for (; copy ; copy = XEXP (copy, 1))
new = alloc_INSN_LIST (XEXP (copy, 0), new); new_rtx = alloc_INSN_LIST (XEXP (copy, 0), new_rtx);
return new; return new_rtx;
} }
static void static void
......
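concat_INSN_LIST is short enough to restate: it conses every element of copy onto the front of the accumulated list, so copy's elements end up in reverse order ahead of old; only the accumulator's name changes, new to new_rtx. A stand-alone C analog using an ordinary singly linked list (types and names invented for the example):

    #include <stdlib.h>

    struct cell { void *item; struct cell *next; };

    static struct cell *
    cons (void *item, struct cell *next)
    {
      struct cell *c = malloc (sizeof *c);
      if (c == NULL)
        abort ();
      c->item = item;
      c->next = next;
      return c;
    }

    /* Elements of COPY appear reversed in front of OLD_LIST.  */
    static struct cell *
    concat_reversed (struct cell *copy, struct cell *old_list)
    {
      struct cell *new_list = old_list;   /* was "new" before the rename */
      for (; copy; copy = copy->next)
        new_list = cons (copy->item, new_list);
      return new_list;
    }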
...@@ -163,12 +163,12 @@ variable_size (tree size) ...@@ -163,12 +163,12 @@ variable_size (tree size)
#endif #endif
/* Return the machine mode to use for a nonscalar of SIZE bits. The /* Return the machine mode to use for a nonscalar of SIZE bits. The
mode must be in class CLASS, and have exactly that many value bits; mode must be in class MCLASS, and have exactly that many value bits;
it may have padding as well. If LIMIT is nonzero, modes of wider it may have padding as well. If LIMIT is nonzero, modes of wider
than MAX_FIXED_MODE_SIZE will not be used. */ than MAX_FIXED_MODE_SIZE will not be used. */
enum machine_mode enum machine_mode
mode_for_size (unsigned int size, enum mode_class class, int limit) mode_for_size (unsigned int size, enum mode_class mclass, int limit)
{ {
enum machine_mode mode; enum machine_mode mode;
...@@ -176,7 +176,7 @@ mode_for_size (unsigned int size, enum mode_class class, int limit) ...@@ -176,7 +176,7 @@ mode_for_size (unsigned int size, enum mode_class class, int limit)
return BLKmode; return BLKmode;
/* Get the first mode which has this size, in the specified class. */ /* Get the first mode which has this size, in the specified class. */
for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode; for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode)) mode = GET_MODE_WIDER_MODE (mode))
if (GET_MODE_PRECISION (mode) == size) if (GET_MODE_PRECISION (mode) == size)
return mode; return mode;
...@@ -187,7 +187,7 @@ mode_for_size (unsigned int size, enum mode_class class, int limit) ...@@ -187,7 +187,7 @@ mode_for_size (unsigned int size, enum mode_class class, int limit)
/* Similar, except passed a tree node. */ /* Similar, except passed a tree node. */
enum machine_mode enum machine_mode
mode_for_size_tree (const_tree size, enum mode_class class, int limit) mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
{ {
unsigned HOST_WIDE_INT uhwi; unsigned HOST_WIDE_INT uhwi;
unsigned int ui; unsigned int ui;
...@@ -198,20 +198,20 @@ mode_for_size_tree (const_tree size, enum mode_class class, int limit) ...@@ -198,20 +198,20 @@ mode_for_size_tree (const_tree size, enum mode_class class, int limit)
ui = uhwi; ui = uhwi;
if (uhwi != ui) if (uhwi != ui)
return BLKmode; return BLKmode;
return mode_for_size (ui, class, limit); return mode_for_size (ui, mclass, limit);
} }
/* Similar, but never return BLKmode; return the narrowest mode that /* Similar, but never return BLKmode; return the narrowest mode that
contains at least the requested number of value bits. */ contains at least the requested number of value bits. */
enum machine_mode enum machine_mode
smallest_mode_for_size (unsigned int size, enum mode_class class) smallest_mode_for_size (unsigned int size, enum mode_class mclass)
{ {
enum machine_mode mode; enum machine_mode mode;
/* Get the first mode which has at least this size, in the /* Get the first mode which has at least this size, in the
specified class. */ specified class. */
for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode; for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode)) mode = GET_MODE_WIDER_MODE (mode))
if (GET_MODE_PRECISION (mode) >= size) if (GET_MODE_PRECISION (mode) >= size)
return mode; return mode;
......
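The stor-layout.c changes rename the mode_class parameter from class to mclass; the search is unchanged: walk the class's modes from narrowest to widest and take the first whose precision matches (mode_for_size, which also honors MAX_FIXED_MODE_SIZE when LIMIT is nonzero) or is at least as large (smallest_mode_for_size). A stand-alone sketch of that first-fit scan; the bit-size table is an assumption standing in for the real mode machinery:

    /* Illustrative only: first-fit scan mirroring smallest_mode_for_size.
       Returns an index into BITS, or -1 in place of BLKmode.  */
    static int
    smallest_index_for_size (unsigned int size, const unsigned int *bits, int n)
    {
      int i;

      /* Narrowest candidate first, like GET_CLASS_NARROWEST_MODE
         followed by GET_MODE_WIDER_MODE.  */
      for (i = 0; i < n; i++)
        if (bits[i] >= size)
          return i;
      return -1;
    }

With bits[] = {8, 16, 32, 64}, a request for 17 bits yields index 2, much as smallest_mode_for_size would return SImode on a typical target; mode_for_size differs only in demanding an exact precision match.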