Commit 618f4073 by Trevor Saunders Committed by Trevor Saunders

remove more ifdefs for HAVE_cc0

gcc/ChangeLog:

2015-04-21  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* caller-save.c (insert_one_insn): Remove ifdef HAVE_cc0.
	* cfgcleanup.c (flow_find_cross_jump): Likewise.
	(flow_find_head_matching_sequence): Likewise.
	(try_head_merge_bb): Likewise.
	* combine.c (can_combine_p): Likewise.
	(try_combine): Likewise.
	(distribute_notes): Likewise.
	* df-problems.c (can_move_insns_across): Likewise.
	* final.c (final): Likewise.
	* gcse.c (insert_insn_end_basic_block): Likewise.
	* ira.c (find_moveable_pseudos): Likewise.
	* reorg.c (try_merge_delay_insns): Likewise.
	(fill_simple_delay_slots): Likewise.
	(fill_slots_from_thread): Likewise.
	* sched-deps.c (sched_analyze_2): Likewise.

From-SVN: r222302
parent faa7b0de
2015-04-21 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* caller-save.c (insert_one_insn): Remove ifdef HAVE_cc0.
* cfgcleanup.c (flow_find_cross_jump): Likewise.
(flow_find_head_matching_sequence): Likewise.
(try_head_merge_bb): Likewise.
* combine.c (can_combine_p): Likewise.
(try_combine): Likewise.
(distribute_notes): Likewise.
* df-problems.c (can_move_insns_across): Likewise.
* final.c (final): Likewise.
* gcse.c (insert_insn_end_basic_block): Likewise.
* ira.c (find_moveable_pseudos): Likewise.
* reorg.c (try_merge_delay_insns): Likewise.
(fill_simple_delay_slots): Likewise.
(fill_slots_from_thread): Likewise.
* sched-deps.c (sched_analyze_2): Likewise.
2015-04-21 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* df-scan.c (df_get_entry_block_def_set): Remove #ifdef
PIC_OFFSET_TABLE_REGNUM.
......
...@@ -1400,18 +1400,16 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat) ...@@ -1400,18 +1400,16 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
rtx_insn *insn = chain->insn; rtx_insn *insn = chain->insn;
struct insn_chain *new_chain; struct insn_chain *new_chain;
#if HAVE_cc0
/* If INSN references CC0, put our insns in front of the insn that sets /* If INSN references CC0, put our insns in front of the insn that sets
CC0. This is always safe, since the only way we could be passed an CC0. This is always safe, since the only way we could be passed an
insn that references CC0 is for a restore, and doing a restore earlier insn that references CC0 is for a restore, and doing a restore earlier
isn't a problem. We do, however, assume here that CALL_INSNs don't isn't a problem. We do, however, assume here that CALL_INSNs don't
reference CC0. Guard against non-INSN's like CODE_LABEL. */ reference CC0. Guard against non-INSN's like CODE_LABEL. */
if ((NONJUMP_INSN_P (insn) || JUMP_P (insn)) if (HAVE_cc0 && (NONJUMP_INSN_P (insn) || JUMP_P (insn))
&& before_p && before_p
&& reg_referenced_p (cc0_rtx, PATTERN (insn))) && reg_referenced_p (cc0_rtx, PATTERN (insn)))
chain = chain->prev, insn = chain->insn; chain = chain->prev, insn = chain->insn;
#endif
new_chain = new_insn_chain (); new_chain = new_insn_chain ();
if (before_p) if (before_p)
......
...@@ -1456,12 +1456,11 @@ flow_find_cross_jump (basic_block bb1, basic_block bb2, rtx_insn **f1, ...@@ -1456,12 +1456,11 @@ flow_find_cross_jump (basic_block bb1, basic_block bb2, rtx_insn **f1,
i2 = PREV_INSN (i2); i2 = PREV_INSN (i2);
} }
#if HAVE_cc0
/* Don't allow the insn after a compare to be shared by /* Don't allow the insn after a compare to be shared by
cross-jumping unless the compare is also shared. */ cross-jumping unless the compare is also shared. */
if (ninsns && reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1)) if (HAVE_cc0 && ninsns && reg_mentioned_p (cc0_rtx, last1)
&& ! sets_cc0_p (last1))
last1 = afterlast1, last2 = afterlast2, last_dir = afterlast_dir, ninsns--; last1 = afterlast1, last2 = afterlast2, last_dir = afterlast_dir, ninsns--;
#endif
/* Include preceding notes and labels in the cross-jump. One, /* Include preceding notes and labels in the cross-jump. One,
this may bring us to the head of the blocks as requested above. this may bring us to the head of the blocks as requested above.
...@@ -1579,12 +1578,11 @@ flow_find_head_matching_sequence (basic_block bb1, basic_block bb2, rtx_insn **f ...@@ -1579,12 +1578,11 @@ flow_find_head_matching_sequence (basic_block bb1, basic_block bb2, rtx_insn **f
i2 = NEXT_INSN (i2); i2 = NEXT_INSN (i2);
} }
#if HAVE_cc0
/* Don't allow a compare to be shared by cross-jumping unless the insn /* Don't allow a compare to be shared by cross-jumping unless the insn
after the compare is also shared. */ after the compare is also shared. */
if (ninsns && reg_mentioned_p (cc0_rtx, last1) && sets_cc0_p (last1)) if (HAVE_cc0 && ninsns && reg_mentioned_p (cc0_rtx, last1)
&& sets_cc0_p (last1))
last1 = beforelast1, last2 = beforelast2, ninsns--; last1 = beforelast1, last2 = beforelast2, ninsns--;
#endif
if (ninsns) if (ninsns)
{ {
...@@ -2370,11 +2368,9 @@ try_head_merge_bb (basic_block bb) ...@@ -2370,11 +2368,9 @@ try_head_merge_bb (basic_block bb)
cond = get_condition (jump, &move_before, true, false); cond = get_condition (jump, &move_before, true, false);
if (cond == NULL_RTX) if (cond == NULL_RTX)
{ {
#if HAVE_cc0 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, jump))
if (reg_mentioned_p (cc0_rtx, jump))
move_before = prev_nonnote_nondebug_insn (jump); move_before = prev_nonnote_nondebug_insn (jump);
else else
#endif
move_before = jump; move_before = jump;
} }
...@@ -2539,11 +2535,9 @@ try_head_merge_bb (basic_block bb) ...@@ -2539,11 +2535,9 @@ try_head_merge_bb (basic_block bb)
cond = get_condition (jump, &move_before, true, false); cond = get_condition (jump, &move_before, true, false);
if (cond == NULL_RTX) if (cond == NULL_RTX)
{ {
#if HAVE_cc0 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, jump))
if (reg_mentioned_p (cc0_rtx, jump))
move_before = prev_nonnote_nondebug_insn (jump); move_before = prev_nonnote_nondebug_insn (jump);
else else
#endif
move_before = jump; move_before = jump;
} }
} }
...@@ -2562,12 +2556,10 @@ try_head_merge_bb (basic_block bb) ...@@ -2562,12 +2556,10 @@ try_head_merge_bb (basic_block bb)
/* Try again, using a different insertion point. */ /* Try again, using a different insertion point. */
move_before = jump; move_before = jump;
#if HAVE_cc0
/* Don't try moving before a cc0 user, as that may invalidate /* Don't try moving before a cc0 user, as that may invalidate
the cc0. */ the cc0. */
if (reg_mentioned_p (cc0_rtx, jump)) if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, jump))
break; break;
#endif
continue; continue;
} }
...@@ -2622,12 +2614,10 @@ try_head_merge_bb (basic_block bb) ...@@ -2622,12 +2614,10 @@ try_head_merge_bb (basic_block bb)
/* For the unmerged insns, try a different insertion point. */ /* For the unmerged insns, try a different insertion point. */
move_before = jump; move_before = jump;
#if HAVE_cc0
/* Don't try moving before a cc0 user, as that may invalidate /* Don't try moving before a cc0 user, as that may invalidate
the cc0. */ the cc0. */
if (reg_mentioned_p (cc0_rtx, jump)) if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, jump))
break; break;
#endif
for (ix = 0; ix < nedges; ix++) for (ix = 0; ix < nedges; ix++)
currptr[ix] = headptr[ix] = nextptr[ix]; currptr[ix] = headptr[ix] = nextptr[ix];
......
...@@ -2066,7 +2066,6 @@ can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED, ...@@ -2066,7 +2066,6 @@ can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED,
return 0; return 0;
#endif #endif
#if HAVE_cc0
/* Don't combine an insn that follows a CC0-setting insn. /* Don't combine an insn that follows a CC0-setting insn.
An insn that uses CC0 must not be separated from the one that sets it. An insn that uses CC0 must not be separated from the one that sets it.
We do, however, allow I2 to follow a CC0-setting insn if that insn We do, however, allow I2 to follow a CC0-setting insn if that insn
...@@ -2076,11 +2075,13 @@ can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED, ...@@ -2076,11 +2075,13 @@ can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED,
It would be more logical to test whether CC0 occurs inside I1 or I2, It would be more logical to test whether CC0 occurs inside I1 or I2,
but that would be much slower, and this ought to be equivalent. */ but that would be much slower, and this ought to be equivalent. */
p = prev_nonnote_insn (insn); if (HAVE_cc0)
if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p)) {
&& ! all_adjacent) p = prev_nonnote_insn (insn);
return 0; if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
#endif && ! all_adjacent)
return 0;
}
/* If we get here, we have passed all the tests and the combination is /* If we get here, we have passed all the tests and the combination is
to be allowed. */ to be allowed. */
...@@ -3114,7 +3115,6 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0, ...@@ -3114,7 +3115,6 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
subst_insn = i3; subst_insn = i3;
#if !HAVE_cc0
/* Many machines that don't use CC0 have insns that can both perform an /* Many machines that don't use CC0 have insns that can both perform an
arithmetic operation and set the condition code. These operations will arithmetic operation and set the condition code. These operations will
be represented as a PARALLEL with the first element of the vector be represented as a PARALLEL with the first element of the vector
...@@ -3126,7 +3126,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0, ...@@ -3126,7 +3126,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
needed, and make the PARALLEL by just replacing I2DEST in I3SRC with needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
I2SRC. Later we will make the PARALLEL that contains I2. */ I2SRC. Later we will make the PARALLEL that contains I2. */
if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET if (!HAVE_cc0 && i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
&& GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
&& CONST_INT_P (XEXP (SET_SRC (PATTERN (i3)), 1)) && CONST_INT_P (XEXP (SET_SRC (PATTERN (i3)), 1))
&& rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest)) && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
...@@ -3216,7 +3216,6 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0, ...@@ -3216,7 +3216,6 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
i2_is_used = 1; i2_is_used = 1;
} }
} }
#endif
if (i2_is_used == 0) if (i2_is_used == 0)
{ {
...@@ -3644,9 +3643,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0, ...@@ -3644,9 +3643,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
are set between I2 and I3. */ are set between I2 and I3. */
if (insn_code_number < 0 if (insn_code_number < 0
&& (split = find_split_point (&newpat, i3, false)) != 0 && (split = find_split_point (&newpat, i3, false)) != 0
#if HAVE_cc0 && (!HAVE_cc0 || REG_P (i2dest))
&& REG_P (i2dest)
#endif
/* We need I2DEST in the proper mode. If it is a hard register /* We need I2DEST in the proper mode. If it is a hard register
or the only use of a pseudo, we can change its mode. or the only use of a pseudo, we can change its mode.
Make sure we don't change a hard register to have a mode that Make sure we don't change a hard register to have a mode that
...@@ -3916,9 +3913,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0, ...@@ -3916,9 +3913,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
&& !(GET_CODE (SET_DEST (set1)) == SUBREG && !(GET_CODE (SET_DEST (set1)) == SUBREG
&& find_reg_note (i2, REG_DEAD, && find_reg_note (i2, REG_DEAD,
SUBREG_REG (SET_DEST (set1)))) SUBREG_REG (SET_DEST (set1))))
#if HAVE_cc0 && (!HAVE_cc0 || !reg_referenced_p (cc0_rtx, set0))
&& !reg_referenced_p (cc0_rtx, set0)
#endif
/* If I3 is a jump, ensure that set0 is a jump so that /* If I3 is a jump, ensure that set0 is a jump so that
we do not create invalid RTL. */ we do not create invalid RTL. */
&& (!JUMP_P (i3) || SET_DEST (set0) == pc_rtx) && (!JUMP_P (i3) || SET_DEST (set0) == pc_rtx)
...@@ -3933,9 +3928,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0, ...@@ -3933,9 +3928,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
&& !(GET_CODE (SET_DEST (set0)) == SUBREG && !(GET_CODE (SET_DEST (set0)) == SUBREG
&& find_reg_note (i2, REG_DEAD, && find_reg_note (i2, REG_DEAD,
SUBREG_REG (SET_DEST (set0)))) SUBREG_REG (SET_DEST (set0))))
#if HAVE_cc0 && (!HAVE_cc0 || !reg_referenced_p (cc0_rtx, set1))
&& !reg_referenced_p (cc0_rtx, set1)
#endif
/* If I3 is a jump, ensure that set1 is a jump so that /* If I3 is a jump, ensure that set1 is a jump so that
we do not create invalid RTL. */ we do not create invalid RTL. */
&& (!JUMP_P (i3) || SET_DEST (set1) == pc_rtx) && (!JUMP_P (i3) || SET_DEST (set1) == pc_rtx)
...@@ -4000,19 +3993,18 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0, ...@@ -4000,19 +3993,18 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
} }
} }
#if HAVE_cc0
/* If I2 is the CC0 setter and I3 is the CC0 user then check whether /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
they are adjacent to each other or not. */ they are adjacent to each other or not. */
{ if (HAVE_cc0)
rtx_insn *p = prev_nonnote_insn (i3); {
if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat rtx_insn *p = prev_nonnote_insn (i3);
&& sets_cc0_p (newi2pat)) if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
{ && sets_cc0_p (newi2pat))
undo_all (); {
return 0; undo_all ();
} return 0;
} }
#endif }
/* Only allow this combination if insn_rtx_costs reports that the /* Only allow this combination if insn_rtx_costs reports that the
replacement instructions are cheaper than the originals. */ replacement instructions are cheaper than the originals. */
...@@ -13796,9 +13788,7 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2, ...@@ -13796,9 +13788,7 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
{ {
rtx set = single_set (tem_insn); rtx set = single_set (tem_insn);
rtx inner_dest = 0; rtx inner_dest = 0;
#if HAVE_cc0
rtx_insn *cc0_setter = NULL; rtx_insn *cc0_setter = NULL;
#endif
if (set != 0) if (set != 0)
for (inner_dest = SET_DEST (set); for (inner_dest = SET_DEST (set);
...@@ -13842,7 +13832,6 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2, ...@@ -13842,7 +13832,6 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
if (tem_insn == i2) if (tem_insn == i2)
i2 = NULL; i2 = NULL;
#if HAVE_cc0
/* Delete the setter too. */ /* Delete the setter too. */
if (cc0_setter) if (cc0_setter)
{ {
...@@ -13859,7 +13848,6 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2, ...@@ -13859,7 +13848,6 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
if (cc0_setter == i2) if (cc0_setter == i2)
i2 = NULL; i2 = NULL;
} }
#endif
} }
else else
{ {
......
...@@ -3859,10 +3859,7 @@ can_move_insns_across (rtx_insn *from, rtx_insn *to, ...@@ -3859,10 +3859,7 @@ can_move_insns_across (rtx_insn *from, rtx_insn *to,
if (NONDEBUG_INSN_P (insn)) if (NONDEBUG_INSN_P (insn))
{ {
if (!bitmap_intersect_p (test_set, local_merge_live) if (!bitmap_intersect_p (test_set, local_merge_live)
#if HAVE_cc0 && (!HAVE_cc0 || !sets_cc0_p (insn)))
&& !sets_cc0_p (insn)
#endif
)
{ {
max_to = insn; max_to = insn;
break; break;
......
...@@ -2029,21 +2029,20 @@ final (rtx_insn *first, FILE *file, int optimize_p) ...@@ -2029,21 +2029,20 @@ final (rtx_insn *first, FILE *file, int optimize_p)
last_ignored_compare = 0; last_ignored_compare = 0;
#if HAVE_cc0 if (HAVE_cc0)
for (insn = first; insn; insn = NEXT_INSN (insn)) for (insn = first; insn; insn = NEXT_INSN (insn))
{ {
/* If CC tracking across branches is enabled, record the insn which /* If CC tracking across branches is enabled, record the insn which
jumps to each branch only reached from one place. */ jumps to each branch only reached from one place. */
if (optimize_p && JUMP_P (insn)) if (optimize_p && JUMP_P (insn))
{ {
rtx lab = JUMP_LABEL (insn); rtx lab = JUMP_LABEL (insn);
if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1) if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
{ {
LABEL_REFS (lab) = insn; LABEL_REFS (lab) = insn;
} }
} }
} }
#endif
init_recog (); init_recog ();
......
...@@ -2048,21 +2048,23 @@ insert_insn_end_basic_block (struct gcse_expr *expr, basic_block bb) ...@@ -2048,21 +2048,23 @@ insert_insn_end_basic_block (struct gcse_expr *expr, basic_block bb)
&& (!single_succ_p (bb) && (!single_succ_p (bb)
|| single_succ_edge (bb)->flags & EDGE_ABNORMAL))) || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
{ {
#if HAVE_cc0
/* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
if cc0 isn't set. */ if cc0 isn't set. */
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); if (HAVE_cc0)
if (note)
insn = safe_as_a <rtx_insn *> (XEXP (note, 0));
else
{ {
rtx_insn *maybe_cc0_setter = prev_nonnote_insn (insn); rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
if (maybe_cc0_setter if (note)
&& INSN_P (maybe_cc0_setter) insn = safe_as_a <rtx_insn *> (XEXP (note, 0));
&& sets_cc0_p (PATTERN (maybe_cc0_setter))) else
insn = maybe_cc0_setter; {
rtx_insn *maybe_cc0_setter = prev_nonnote_insn (insn);
if (maybe_cc0_setter
&& INSN_P (maybe_cc0_setter)
&& sets_cc0_p (PATTERN (maybe_cc0_setter)))
insn = maybe_cc0_setter;
}
} }
#endif
/* FIXME: What if something in cc0/jump uses value set in new insn? */ /* FIXME: What if something in cc0/jump uses value set in new insn? */
new_insn = emit_insn_before_noloc (pat, insn, bb); new_insn = emit_insn_before_noloc (pat, insn, bb);
} }
......
...@@ -4724,10 +4724,7 @@ find_moveable_pseudos (void) ...@@ -4724,10 +4724,7 @@ find_moveable_pseudos (void)
{ {
if (bitmap_bit_p (def_bb_moveable, regno) if (bitmap_bit_p (def_bb_moveable, regno)
&& !control_flow_insn_p (use_insn) && !control_flow_insn_p (use_insn)
#if HAVE_cc0 && (!HAVE_cc0 || !sets_cc0_p (use_insn)))
&& !sets_cc0_p (use_insn)
#endif
)
{ {
if (modified_between_p (DF_REF_REG (use), def_insn, use_insn)) if (modified_between_p (DF_REF_REG (use), def_insn, use_insn))
{ {
......
...@@ -1364,10 +1364,8 @@ try_merge_delay_insns (rtx insn, rtx_insn *thread) ...@@ -1364,10 +1364,8 @@ try_merge_delay_insns (rtx insn, rtx_insn *thread)
continue; continue;
if (GET_CODE (next_to_match) == GET_CODE (trial) if (GET_CODE (next_to_match) == GET_CODE (trial)
#if HAVE_cc0
/* We can't share an insn that sets cc0. */ /* We can't share an insn that sets cc0. */
&& ! sets_cc0_p (pat) && (!HAVE_cc0 || ! sets_cc0_p (pat))
#endif
&& ! insn_references_resource_p (trial, &set, true) && ! insn_references_resource_p (trial, &set, true)
&& ! insn_sets_resource_p (trial, &set, true) && ! insn_sets_resource_p (trial, &set, true)
&& ! insn_sets_resource_p (trial, &needed, true) && ! insn_sets_resource_p (trial, &needed, true)
...@@ -1437,9 +1435,7 @@ try_merge_delay_insns (rtx insn, rtx_insn *thread) ...@@ -1437,9 +1435,7 @@ try_merge_delay_insns (rtx insn, rtx_insn *thread)
if (! insn_references_resource_p (dtrial, &set, true) if (! insn_references_resource_p (dtrial, &set, true)
&& ! insn_sets_resource_p (dtrial, &set, true) && ! insn_sets_resource_p (dtrial, &set, true)
&& ! insn_sets_resource_p (dtrial, &needed, true) && ! insn_sets_resource_p (dtrial, &needed, true)
#if HAVE_cc0 && (!HAVE_cc0 || ! sets_cc0_p (PATTERN (dtrial)))
&& ! sets_cc0_p (PATTERN (dtrial))
#endif
&& rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial)) && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
/* Check that DTRIAL and NEXT_TO_MATCH does not reference a /* Check that DTRIAL and NEXT_TO_MATCH does not reference a
resource modified between them (only dtrial is checked because resource modified between them (only dtrial is checked because
...@@ -2114,10 +2110,8 @@ fill_simple_delay_slots (int non_jumps_p) ...@@ -2114,10 +2110,8 @@ fill_simple_delay_slots (int non_jumps_p)
filter_flags ? &fset : &set, filter_flags ? &fset : &set,
true) true)
&& ! insn_sets_resource_p (trial, &needed, true) && ! insn_sets_resource_p (trial, &needed, true)
#if HAVE_cc0
/* Can't separate set of cc0 from its use. */ /* Can't separate set of cc0 from its use. */
&& ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat)) && (!HAVE_cc0 || ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat)))
#endif
&& ! can_throw_internal (trial)) && ! can_throw_internal (trial))
{ {
trial = try_split (pat, trial, 1); trial = try_split (pat, trial, 1);
...@@ -2249,9 +2243,7 @@ fill_simple_delay_slots (int non_jumps_p) ...@@ -2249,9 +2243,7 @@ fill_simple_delay_slots (int non_jumps_p)
&& ! insn_references_resource_p (trial, &set, true) && ! insn_references_resource_p (trial, &set, true)
&& ! insn_sets_resource_p (trial, &set, true) && ! insn_sets_resource_p (trial, &set, true)
&& ! insn_sets_resource_p (trial, &needed, true) && ! insn_sets_resource_p (trial, &needed, true)
#if HAVE_cc0 && (!HAVE_cc0 || ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat)))
&& ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
#endif
&& ! (maybe_never && may_trap_or_fault_p (pat)) && ! (maybe_never && may_trap_or_fault_p (pat))
&& (trial = try_split (pat, trial, 0)) && (trial = try_split (pat, trial, 0))
&& eligible_for_delay (insn, slots_filled, trial, flags) && eligible_for_delay (insn, slots_filled, trial, flags)
...@@ -2297,9 +2289,7 @@ fill_simple_delay_slots (int non_jumps_p) ...@@ -2297,9 +2289,7 @@ fill_simple_delay_slots (int non_jumps_p)
&& ! insn_references_resource_p (next_trial, &set, true) && ! insn_references_resource_p (next_trial, &set, true)
&& ! insn_sets_resource_p (next_trial, &set, true) && ! insn_sets_resource_p (next_trial, &set, true)
&& ! insn_sets_resource_p (next_trial, &needed, true) && ! insn_sets_resource_p (next_trial, &needed, true)
#if HAVE_cc0 && (!HAVE_cc0 || ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial)))
&& ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
#endif
&& ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial))) && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
&& (next_trial = try_split (PATTERN (next_trial), next_trial, 0)) && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
&& eligible_for_delay (insn, slots_filled, next_trial, flags) && eligible_for_delay (insn, slots_filled, next_trial, flags)
...@@ -2510,10 +2500,8 @@ fill_slots_from_thread (rtx_insn *insn, rtx condition, rtx thread_or_return, ...@@ -2510,10 +2500,8 @@ fill_slots_from_thread (rtx_insn *insn, rtx condition, rtx thread_or_return,
if (! insn_references_resource_p (trial, &set, true) if (! insn_references_resource_p (trial, &set, true)
&& ! insn_sets_resource_p (trial, &set, true) && ! insn_sets_resource_p (trial, &set, true)
&& ! insn_sets_resource_p (trial, &needed, true) && ! insn_sets_resource_p (trial, &needed, true)
#if HAVE_cc0 && (!HAVE_cc0 || (! (reg_mentioned_p (cc0_rtx, pat)
&& ! (reg_mentioned_p (cc0_rtx, pat) && (! own_thread || ! sets_cc0_p (pat)))))
&& (! own_thread || ! sets_cc0_p (pat)))
#endif
&& ! can_throw_internal (trial)) && ! can_throw_internal (trial))
{ {
rtx prior_insn; rtx prior_insn;
......
...@@ -2609,9 +2609,9 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx_insn *insn) ...@@ -2609,9 +2609,9 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx_insn *insn)
return; return;
case CC0: case CC0:
#if !HAVE_cc0 if (!HAVE_cc0)
gcc_unreachable (); gcc_unreachable ();
#endif
/* User of CC0 depends on immediately preceding insn. */ /* User of CC0 depends on immediately preceding insn. */
SCHED_GROUP_P (insn) = 1; SCHED_GROUP_P (insn) = 1;
/* Don't move CC0 setter to another block (it can set up the /* Don't move CC0 setter to another block (it can set up the
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment