Commit f283421d by Richard Henderson Committed by Richard Henderson

loop.c (get_condition): Don't combine when either compare is MODE_CC.

* loop.c (get_condition): Don't combine when either compare is MODE_CC.
* alpha.c (alpha_emit_conditional_branch): New function.  Taken from
the body of beq; additionally set the mode of the branch to CCmode for
FP compares and not fast_math.
(alpha_emit_conditional_move): Always use a compare insn for FP
when not fast_math, as well as setting CCmode on the cmov.
* alpha.md (beq, bne, blt, et al): Call alpha_emit_conditional_branch.

From-SVN: r19645
parent 76080169
Sat May 9 02:02:15 1998 Richard Henderson <rth@cygnus.com>
* loop.c (get_condition): Don't combine when either compare is MODE_CC.
* alpha.c (alpha_emit_conditional_branch): New function. Taken from
the body of beq; additionally set the mode of the branch to CCmode for
FP compares and not fast_math.
(alpha_emit_conditional_move): Always use a compare insn for FP
when not fast_math, as well as setting CCmode on the cmov.
* alpha.md (beq, bne, blt, et al): Call alpha_emit_conditional_branch.
* machmode.h (COMPLEX_MODE_P): New macro.
Sat May 9 01:53:23 1998 Richard Henderson <rth@cygnus.com>
* haifa-sched.c (print_exp): Fix typo.
......
...@@ -1223,6 +1223,120 @@ alpha_emit_set_long_const (target, c) ...@@ -1223,6 +1223,120 @@ alpha_emit_set_long_const (target, c)
} }
#endif /* HOST_BITS_PER_WIDE_INT == 64 */ #endif /* HOST_BITS_PER_WIDE_INT == 64 */
/* Generate the comparison for a conditional branch.

   CODE is the comparison code requested by the branch pattern (EQ, NE,
   LT, ...).  The operands being compared come from the globals
   alpha_compare_op0/alpha_compare_op1, and alpha_compare_fp_p selects
   between DFmode and DImode comparisons.

   Emits the compare instruction when one is needed, and returns the
   comparison rtx to be placed in the branch's if_then_else.  For IEEE
   (non-fast-math) FP compares the branch is given CCmode so that later
   passes will not combine the compare and the branch.  */

rtx
alpha_emit_conditional_branch (code)
     enum rtx_code code;
{
  enum rtx_code cmp_code, branch_code;
  enum machine_mode cmp_mode, branch_mode = VOIDmode;
  rtx op0 = alpha_compare_op0, op1 = alpha_compare_op1;
  rtx tem;

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
      /* We have these compares directly.  */
      cmp_code = code, branch_code = NE;
      break;

    case NE:
      /* This must be reversed: compare for EQ, branch when false.  */
      cmp_code = EQ, branch_code = EQ;
      break;

    case GE:  case GT:  case GEU:  case GTU:
      /* For FP, we swap them, for INT, we reverse them.  */
      if (alpha_compare_fp_p)
	{
	  cmp_code = swap_condition (code);
	  branch_code = NE;
	  tem = op0, op0 = op1, op1 = tem;
	}
      else
	{
	  cmp_code = reverse_condition (code);
	  branch_code = EQ;
	}
      break;

    default:
      abort ();
    }

  if (alpha_compare_fp_p)
    {
      cmp_mode = DFmode;
      if (flag_fast_math)
	{
	  /* When we are not as concerned about non-finite values, and we
	     are comparing against zero, we can branch directly.  */
	  if (op1 == CONST0_RTX (DFmode))
	    cmp_code = NIL, branch_code = code;
	  else if (op0 == CONST0_RTX (DFmode))
	    {
	      /* Undo the swap we probably did just above.  Note that
		 swap_condition must be applied to the old cmp_code
		 before it is cleared to NIL, not after.  */
	      tem = op0, op0 = op1, op1 = tem;
	      branch_code = swap_condition (cmp_code);
	      cmp_code = NIL;
	    }
	}
      else
	{
	  /* ??? We mark the branch mode to be CCmode to prevent the
	     compare and branch from being combined, since the compare
	     insn follows IEEE rules that the branch does not.  */
	  branch_mode = CCmode;
	}
    }
  else
    {
      cmp_mode = DImode;

      /* The following optimizations are only for signed compares.  */
      if (code != LEU && code != LTU && code != GEU && code != GTU)
	{
	  /* Whee.  Compare and branch against 0 directly.  */
	  if (op1 == const0_rtx)
	    cmp_code = NIL, branch_code = code;

	  /* We want to use cmpcc/bcc when we can, since there is a zero delay
	     bypass between logicals and br/cmov on EV5.  But we don't want to
	     force valid immediate constants into registers needlessly.  */
	  else if (GET_CODE (op1) == CONST_INT)
	    {
	      HOST_WIDE_INT v = INTVAL (op1), n = -v;

	      if (! CONST_OK_FOR_LETTER_P (v, 'I')
		  && (CONST_OK_FOR_LETTER_P (n, 'K')
		      || CONST_OK_FOR_LETTER_P (n, 'L')))
		{
		  cmp_code = PLUS, branch_code = code;
		  op1 = GEN_INT (n);
		}
	    }
	}
    }

  /* Force op0 into a register.  */
  if (GET_CODE (op0) != REG)
    op0 = force_reg (cmp_mode, op0);

  /* Emit an initial compare instruction, if necessary.  */
  tem = op0;
  if (cmp_code != NIL)
    {
      tem = gen_reg_rtx (cmp_mode);
      emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1));
    }

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, branch_mode, tem, CONST0_RTX (cmp_mode));
}
/* Rewrite a comparison against zero CMP of the form /* Rewrite a comparison against zero CMP of the form
(CODE (cc0) (const_int 0)) so it can be written validly in (CODE (cc0) (const_int 0)) so it can be written validly in
a conditional move (if_then_else CMP ...). a conditional move (if_then_else CMP ...).
...@@ -1241,6 +1355,7 @@ alpha_emit_conditional_move (cmp, mode) ...@@ -1241,6 +1355,7 @@ alpha_emit_conditional_move (cmp, mode)
enum machine_mode cmp_mode enum machine_mode cmp_mode
= (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0)); = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
enum machine_mode cmp_op_mode = alpha_compare_fp_p ? DFmode : DImode; enum machine_mode cmp_op_mode = alpha_compare_fp_p ? DFmode : DImode;
enum machine_mode cmov_mode = VOIDmode;
rtx tem; rtx tem;
if (alpha_compare_fp_p != FLOAT_MODE_P (mode)) if (alpha_compare_fp_p != FLOAT_MODE_P (mode))
...@@ -1249,6 +1364,7 @@ alpha_emit_conditional_move (cmp, mode) ...@@ -1249,6 +1364,7 @@ alpha_emit_conditional_move (cmp, mode)
/* We may be able to use a conditional move directly. /* We may be able to use a conditional move directly.
This avoids emitting spurious compares. */ This avoids emitting spurious compares. */
if (signed_comparison_operator (cmp, cmp_op_mode) if (signed_comparison_operator (cmp, cmp_op_mode)
&& (!alpha_compare_fp_p || flag_fast_math)
&& (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode))) && (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode)))
return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
...@@ -1281,9 +1397,15 @@ alpha_emit_conditional_move (cmp, mode) ...@@ -1281,9 +1397,15 @@ alpha_emit_conditional_move (cmp, mode)
abort (); abort ();
} }
/* ??? We mark the branch mode to be CCmode to prevent the compare
and cmov from being combined, since the compare insn follows IEEE
rules that the cmov does not. */
if (alpha_compare_fp_p && !flag_fast_math)
cmov_mode = CCmode;
tem = gen_reg_rtx (cmp_op_mode); tem = gen_reg_rtx (cmp_op_mode);
emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_op_mode, op0, op1)); emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_op_mode, op0, op1));
return gen_rtx_fmt_ee (cmov_code, VOIDmode, tem, CONST0_RTX (cmp_op_mode)); return gen_rtx_fmt_ee (cmov_code, cmov_mode, tem, CONST0_RTX (cmp_op_mode));
} }
/* Use ext[wlq][lh] as the Architecture Handbook describes for extracting /* Use ext[wlq][lh] as the Architecture Handbook describes for extracting
......
...@@ -2855,212 +2855,84 @@ ...@@ -2855,212 +2855,84 @@
}") }")
(define_expand "beq" (define_expand "beq"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (EQ); }")
{
enum machine_mode mode;
enum rtx_code compare_code = EQ, branch_code = NE;
if (alpha_compare_fp_p)
mode = DFmode;
else
{
mode = DImode;
/* We want to use cmpeq/bne when we can, since there is a zero-delay
bypass between logicals and br/cmov on the 21164. But we don't
want to force valid immediate constants into registers needlessly. */
if (GET_CODE (alpha_compare_op1) == CONST_INT
&& ((INTVAL (alpha_compare_op1) >= -0x8000
&& INTVAL (alpha_compare_op1) < 0)
|| (INTVAL (alpha_compare_op1) > 0xff
&& INTVAL (alpha_compare_op1) < 0x8000)))
{
compare_code = PLUS, branch_code = EQ;
alpha_compare_op1 = GEN_INT (- INTVAL (alpha_compare_op1));
}
}
operands[1] = gen_reg_rtx (mode);
operands[2] = gen_rtx_fmt_ee (compare_code, mode,
alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_fmt_ee (branch_code, VOIDmode,
operands[1], CONST0_RTX (mode));
}")
(define_expand "bne" (define_expand "bne"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (NE); }")
{
enum machine_mode mode;
enum rtx_code compare_code = EQ, branch_code = EQ;
if (alpha_compare_fp_p)
mode = DFmode;
else
{
mode = DImode;
/* We want to use cmpeq/bne when we can, since there is a zero-delay
bypass between logicals and br/cmov on the 21164. But we don't
want to force valid immediate constants into registers needlessly. */
if (GET_CODE (alpha_compare_op1) == CONST_INT
&& ((INTVAL (alpha_compare_op1) >= -0x8000
&& INTVAL (alpha_compare_op1) < 0)
|| (INTVAL (alpha_compare_op1) > 0xff
&& INTVAL (alpha_compare_op1) < 0x8000)))
{
compare_code = PLUS, branch_code = NE;
alpha_compare_op1 = GEN_INT (- INTVAL (alpha_compare_op1));
}
}
operands[1] = gen_reg_rtx (mode);
operands[2] = gen_rtx_fmt_ee (compare_code, mode,
alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_fmt_ee (branch_code, VOIDmode,
operands[1], CONST0_RTX (mode));
}")
(define_expand "blt" (define_expand "blt"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (LT); }")
{
enum machine_mode mode = alpha_compare_fp_p ? DFmode : DImode;
operands[1] = gen_reg_rtx (mode);
operands[2] = gen_rtx_LT (mode, alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_NE (VOIDmode, operands[1], CONST0_RTX (mode));
}")
(define_expand "ble" (define_expand "ble"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (LE); }")
{
enum machine_mode mode = alpha_compare_fp_p ? DFmode : DImode;
operands[1] = gen_reg_rtx (mode);
operands[2] = gen_rtx_LE (mode, alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_NE (VOIDmode, operands[1], CONST0_RTX (mode));
}")
(define_expand "bgt" (define_expand "bgt"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (GT); }")
{
if (alpha_compare_fp_p)
{
operands[1] = gen_reg_rtx (DFmode);
operands[2] = gen_rtx_LT (DFmode, alpha_compare_op1, alpha_compare_op0);
operands[3] = gen_rtx_NE (VOIDmode, operands[1], CONST0_RTX (DFmode));
}
else
{
operands[1] = gen_reg_rtx (DImode);
operands[2] = gen_rtx_LE (DImode, alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_EQ (VOIDmode, operands[1], const0_rtx);
}
}")
(define_expand "bge" (define_expand "bge"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (GE); }")
{
if (alpha_compare_fp_p)
{
operands[1] = gen_reg_rtx (DFmode);
operands[2] = gen_rtx_LE (DFmode, alpha_compare_op1, alpha_compare_op0);
operands[3] = gen_rtx_NE (VOIDmode, operands[1], CONST0_RTX (DFmode));
}
else
{
operands[1] = gen_reg_rtx (DImode);
operands[2] = gen_rtx_LT (DImode, alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_EQ (VOIDmode, operands[1], const0_rtx);
}
}")
(define_expand "bltu" (define_expand "bltu"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (LTU); }")
{
operands[1] = gen_reg_rtx (DImode);
operands[2] = gen_rtx_LTU (DImode, alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_NE (VOIDmode, operands[1], const0_rtx);
}")
(define_expand "bleu" (define_expand "bleu"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (LEU); }")
{
operands[1] = gen_reg_rtx (DImode);
operands[2] = gen_rtx_LEU (DImode, alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_NE (VOIDmode, operands[1], const0_rtx);
}")
(define_expand "bgtu" (define_expand "bgtu"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (GTU); }")
{
operands[1] = gen_reg_rtx (DImode);
operands[2] = gen_rtx_LEU (DImode, alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_EQ (VOIDmode, operands[1], const0_rtx);
}")
(define_expand "bgeu" (define_expand "bgeu"
[(set (match_dup 1) (match_dup 2)) [(set (pc)
(set (pc) (if_then_else (match_dup 1)
(if_then_else (match_dup 3)
(label_ref (match_operand 0 "" "")) (label_ref (match_operand 0 "" ""))
(pc)))] (pc)))]
"" ""
" "{ operands[1] = alpha_emit_conditional_branch (GEU); }")
{
operands[1] = gen_reg_rtx (DImode);
operands[2] = gen_rtx_LTU (DImode, alpha_compare_op0, alpha_compare_op1);
operands[3] = gen_rtx_EQ (VOIDmode, operands[1], const0_rtx);
}")
(define_expand "seq" (define_expand "seq"
[(set (match_operand:DI 0 "register_operand" "") [(set (match_operand:DI 0 "register_operand" "")
......
...@@ -6978,6 +6978,7 @@ get_condition (jump, earliest) ...@@ -6978,6 +6978,7 @@ get_condition (jump, earliest)
rtx op0, op1; rtx op0, op1;
int reverse_code = 0; int reverse_code = 0;
int did_reverse_condition = 0; int did_reverse_condition = 0;
enum machine_mode mode;
/* If this is not a standard conditional jump, we can't parse it. */ /* If this is not a standard conditional jump, we can't parse it. */
if (GET_CODE (jump) != JUMP_INSN if (GET_CODE (jump) != JUMP_INSN
...@@ -6985,6 +6986,7 @@ get_condition (jump, earliest) ...@@ -6985,6 +6986,7 @@ get_condition (jump, earliest)
return 0; return 0;
code = GET_CODE (XEXP (SET_SRC (PATTERN (jump)), 0)); code = GET_CODE (XEXP (SET_SRC (PATTERN (jump)), 0));
mode = GET_MODE (XEXP (SET_SRC (PATTERN (jump)), 0));
op0 = XEXP (XEXP (SET_SRC (PATTERN (jump)), 0), 0); op0 = XEXP (XEXP (SET_SRC (PATTERN (jump)), 0), 0);
op1 = XEXP (XEXP (SET_SRC (PATTERN (jump)), 0), 1); op1 = XEXP (XEXP (SET_SRC (PATTERN (jump)), 0), 1);
...@@ -7051,6 +7053,13 @@ get_condition (jump, earliest) ...@@ -7051,6 +7053,13 @@ get_condition (jump, earliest)
{ {
enum machine_mode inner_mode = GET_MODE (SET_SRC (set)); enum machine_mode inner_mode = GET_MODE (SET_SRC (set));
/* ??? We may not combine comparisons done in a CCmode with
comparisons not done in a CCmode. This is to aid targets
like Alpha that have an IEEE compliant EQ instruction, and
a non-IEEE compliant BEQ instruction. The use of CCmode is
actually artificial, simply to prevent the combination, but
should not affect other platforms. */
if ((GET_CODE (SET_SRC (set)) == COMPARE if ((GET_CODE (SET_SRC (set)) == COMPARE
|| (((code == NE || (((code == NE
|| (code == LT || (code == LT
...@@ -7066,7 +7075,9 @@ get_condition (jump, earliest) ...@@ -7066,7 +7075,9 @@ get_condition (jump, earliest)
&& FLOAT_STORE_FLAG_VALUE < 0) && FLOAT_STORE_FLAG_VALUE < 0)
#endif #endif
)) ))
&& GET_RTX_CLASS (GET_CODE (SET_SRC (set))) == '<'))) && GET_RTX_CLASS (GET_CODE (SET_SRC (set))) == '<'))
&& ((GET_MODE_CLASS (mode) == MODE_CC)
!= (GET_MODE_CLASS (inner_mode) == MODE_CC)))
x = SET_SRC (set); x = SET_SRC (set);
else if (((code == EQ else if (((code == EQ
|| (code == GE || (code == GE
...@@ -7082,7 +7093,9 @@ get_condition (jump, earliest) ...@@ -7082,7 +7093,9 @@ get_condition (jump, earliest)
&& FLOAT_STORE_FLAG_VALUE < 0) && FLOAT_STORE_FLAG_VALUE < 0)
#endif #endif
)) ))
&& GET_RTX_CLASS (GET_CODE (SET_SRC (set))) == '<') && GET_RTX_CLASS (GET_CODE (SET_SRC (set))) == '<'
&& ((GET_MODE_CLASS (mode) == MODE_CC)
!= (GET_MODE_CLASS (inner_mode) == MODE_CC)))
{ {
/* We might have reversed a LT to get a GE here. But this wasn't /* We might have reversed a LT to get a GE here. But this wasn't
actually the comparison of data, so we don't flag that we actually the comparison of data, so we don't flag that we
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment