Commit e26ec0bb by Roger Sayle

fold-const.c (fold_comparison): New subroutine of fold_binary containing transformations common to both the equality and ordering relational operators, factored out of fold_binary.


	* fold-const.c (fold_comparison): New subroutine of fold_binary
	containing transformations common to both the equality and
	ordering relational operators, factored out of fold_binary.
	(fold_binary): Separate out the equality operators (EQ_EXPR
	and NE_EXPR) from the ordering operators (LT_EXPR, GT_EXPR,
	LE_EXPR and GE_EXPR), calling fold_comparison to perform the
	transformations common to both.
	(fold_div_compare): Fix latent bugs in the previously unreachable 
	LT_EXPR and GE_EXPR cases.

From-SVN: r111423
parent 6bb9adda
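The fold_div_compare change is the "latent bugs" fix: when the bound computed for X/C1 CMP C2 overflows the operand type, the comparison has a constant result, and the previously unreachable LT_EXPR and GE_EXPR arms returned that constant inverted (0 where the answer is always 1, and 1 where it is always 0). A minimal sketch of the semantics involved, assuming 32-bit int; the function names and constants below are invented for illustration and are plain C, not GCC internals:

  /* With divisor 2 and bound 2000000000, the product 2*2000000000 overflows
     int, so the quotient can never reach the bound: the < form is always
     true and the >= form is always false.  */
  #include <stdio.h>
  #include <limits.h>

  static int lt_case (int x) { return x / 2 < 2000000000; }   /* always 1 */
  static int ge_case (int x) { return x / 2 >= 2000000000; }  /* always 0 */

  int main (void)
  {
    printf ("%d %d\n", lt_case (INT_MIN), lt_case (INT_MAX));  /* prints 1 1 */
    printf ("%d %d\n", ge_case (INT_MIN), ge_case (INT_MAX));  /* prints 0 0 */
    return 0;
  }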
2006-02-24 Roger Sayle <roger@eyesopen.com>
* fold-const.c (fold_comparison): New subroutine of fold_binary
containing transformations common to both the equality and
ordering relational operators, factored out of fold_binary.
(fold_binary): Separate out the equality operators (EQ_EXPR
and NE_EXPR) from the ordering operators (LT_EXPR, GT_EXPR,
LE_EXPR and GE_EXPR), calling fold_comparison to perform the
transformations common to both.
(fold_div_compare): Fix latent bugs in the previously unreachable
LT_EXPR and GE_EXPR cases.
2006-02-24 David Edelsohn <edelsohn@gnu.org>
* config/rs6000/rs6000.c (rs6000_init_builtins): Change
...
@@ -6114,7 +6114,7 @@ fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
case LT_EXPR:
if (TREE_OVERFLOW (lo))
- return omit_one_operand (type, integer_zero_node, arg00);
+ return omit_one_operand (type, integer_one_node, arg00);
return fold_build2 (LT_EXPR, type, arg00, lo);
case LE_EXPR:
@@ -6129,7 +6129,7 @@ fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
case GE_EXPR:
if (TREE_OVERFLOW (lo))
- return omit_one_operand (type, integer_one_node, arg00);
+ return omit_one_operand (type, integer_zero_node, arg00);
return fold_build2 (GE_EXPR, type, arg00, lo);
default:
@@ -7223,6 +7223,446 @@ fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
return NULL_TREE;
}
/* Subroutine of fold_binary. This routine performs all of the
transformations that are common to the equality/inequality
operators (EQ_EXPR and NE_EXPR) and the ordering operators
(LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
fold_binary should call fold_binary. Fold a comparison with
tree code CODE and type TYPE with operands OP0 and OP1. Return
the folded comparison or NULL_TREE. */
static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
tree arg0, arg1, tem;
arg0 = op0;
arg1 = op1;
STRIP_SIGN_NOPS (arg0);
STRIP_SIGN_NOPS (arg1);
tem = fold_relational_const (code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
/* If one arg is a real or integer constant, put it last. */
if (tree_swap_operands_p (arg0, arg1, true))
return fold_build2 (swap_tree_comparison (code), type, op1, op0);
/* If this is a comparison of two exprs that look like an
ARRAY_REF of the same object, then we can fold this to a
comparison of the two offsets. */
{
tree base0, offset0, base1, offset1;
if (extract_array_ref (arg0, &base0, &offset0)
&& extract_array_ref (arg1, &base1, &offset1)
&& operand_equal_p (base0, base1, 0))
{
/* Handle no offsets on both sides specially. */
if (offset0 == NULL_TREE && offset1 == NULL_TREE)
return fold_build2 (code, type, integer_zero_node,
integer_zero_node);
if (!offset0 || !offset1
|| TREE_TYPE (offset0) == TREE_TYPE (offset1))
{
if (offset0 == NULL_TREE)
offset0 = build_int_cst (TREE_TYPE (offset1), 0);
if (offset1 == NULL_TREE)
offset1 = build_int_cst (TREE_TYPE (offset0), 0);
return fold_build2 (code, type, offset0, offset1);
}
}
}
/* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
&& !TYPE_UNSIGNED (TREE_TYPE (arg1))
&& !(flag_wrapv || flag_trapv))
&& (TREE_CODE (arg1) == INTEGER_CST
&& !TREE_OVERFLOW (arg1)))
{
tree const1 = TREE_OPERAND (arg0, 1);
tree const2 = arg1;
tree variable = TREE_OPERAND (arg0, 0);
tree lhs;
int lhs_add;
lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
TREE_TYPE (arg1), const2, const1);
if (TREE_CODE (lhs) == TREE_CODE (arg1)
&& (TREE_CODE (lhs) != INTEGER_CST
|| !TREE_OVERFLOW (lhs)))
return fold_build2 (code, type, variable, lhs);
}
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
tree targ1 = strip_float_extensions (arg1);
tree newtype = TREE_TYPE (targ0);
if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
newtype = TREE_TYPE (targ1);
/* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
return fold_build2 (code, type, fold_convert (newtype, targ0),
fold_convert (newtype, targ1));
/* (-a) CMP (-b) -> b CMP a */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == NEGATE_EXPR)
return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
TREE_OPERAND (arg0, 0));
if (TREE_CODE (arg1) == REAL_CST)
{
REAL_VALUE_TYPE cst;
cst = TREE_REAL_CST (arg1);
/* (-a) CMP CST -> a swap(CMP) (-CST) */
if (TREE_CODE (arg0) == NEGATE_EXPR)
return fold_build2 (swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 0),
build_real (TREE_TYPE (arg1),
REAL_VALUE_NEGATE (cst)));
/* IEEE doesn't distinguish +0 and -0 in comparisons. */
/* a CMP (-0) -> a CMP 0 */
if (REAL_VALUE_MINUS_ZERO (cst))
return fold_build2 (code, type, arg0,
build_real (TREE_TYPE (arg1), dconst0));
/* x != NaN is always true, other ops are always false. */
if (REAL_VALUE_ISNAN (cst)
&& ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
{
tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
return omit_one_operand (type, tem, arg0);
}
/* Fold comparisons against infinity. */
if (REAL_VALUE_ISINF (cst))
{
tem = fold_inf_compare (code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
}
}
/* If this is a comparison of a real constant with a PLUS_EXPR
or a MINUS_EXPR of a real constant, we can convert it into a
comparison with a revised real constant as long as no overflow
occurs when unsafe_math_optimizations are enabled. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == REAL_CST
&& (TREE_CODE (arg0) == PLUS_EXPR
|| TREE_CODE (arg0) == MINUS_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
&& 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
? MINUS_EXPR : PLUS_EXPR,
arg1, TREE_OPERAND (arg0, 1), 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Likewise, we can simplify a comparison of a real constant with
a MINUS_EXPR whose first operand is also a real constant, i.e.
(c1 - x) < c2 becomes x > c1-c2. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == REAL_CST
&& TREE_CODE (arg0) == MINUS_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
&& 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
arg1, 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 1), tem);
/* Fold comparisons against built-in math functions. */
if (TREE_CODE (arg1) == REAL_CST
&& flag_unsafe_math_optimizations
&& ! flag_errno_math)
{
enum built_in_function fcode = builtin_mathfn_code (arg0);
if (fcode != END_BUILTINS)
{
tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
}
}
}
/* Convert foo++ == CONST into ++foo == CONST + INCR. */
if (TREE_CONSTANT (arg1)
&& (TREE_CODE (arg0) == POSTINCREMENT_EXPR
|| TREE_CODE (arg0) == POSTDECREMENT_EXPR)
/* This optimization is invalid for ordered comparisons
if CONST+INCR overflows or if foo+incr might overflow.
This optimization is invalid for floating point due to rounding.
For pointer types we assume overflow doesn't happen. */
&& (POINTER_TYPE_P (TREE_TYPE (arg0))
|| (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
&& (code == EQ_EXPR || code == NE_EXPR))))
{
tree varop, newconst;
if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
{
newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
arg1, TREE_OPERAND (arg0, 1));
varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
}
else
{
newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
arg1, TREE_OPERAND (arg0, 1));
varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
}
/* If VAROP is a reference to a bitfield, we must mask
the constant by the width of the field. */
if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
&& DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
&& host_integerp (DECL_SIZE (TREE_OPERAND
(TREE_OPERAND (varop, 0), 1)), 1))
{
tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
tree folded_compare, shift;
/* First check whether the comparison would come out
always the same. If we don't do that we would
change the meaning with the masking. */
folded_compare = fold_build2 (code, type,
TREE_OPERAND (varop, 0), arg1);
if (TREE_CODE (folded_compare) == INTEGER_CST)
return omit_one_operand (type, folded_compare, varop);
shift = build_int_cst (NULL_TREE,
TYPE_PRECISION (TREE_TYPE (varop)) - size);
shift = fold_convert (TREE_TYPE (varop), shift);
newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
newconst, shift);
newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
newconst, shift);
}
return fold_build2 (code, type, varop, newconst);
}
if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
&& (TREE_CODE (arg0) == NOP_EXPR
|| TREE_CODE (arg0) == CONVERT_EXPR))
{
/* If we are widening one operand of an integer comparison,
see if the other operand is similarly being widened. Perhaps we
can do the comparison in the narrower type. */
tem = fold_widened_comparison (code, type, arg0, arg1);
if (tem)
return tem;
/* Or if we are changing signedness. */
tem = fold_sign_changed_comparison (code, type, arg0, arg1);
if (tem)
return tem;
}
/* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
constant, we can simplify it. */
if (TREE_CODE (arg1) == INTEGER_CST
&& (TREE_CODE (arg0) == MIN_EXPR
|| TREE_CODE (arg0) == MAX_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tem = optimize_minmax_comparison (code, type, op0, op1);
if (tem)
return tem;
}
/* Simplify comparison of something with itself. (For IEEE
floating-point, we can only do some of these simplifications.) */
if (operand_equal_p (arg0, arg1, 0))
{
switch (code)
{
case EQ_EXPR:
if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
|| ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return constant_boolean_node (1, type);
break;
case GE_EXPR:
case LE_EXPR:
if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
|| ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return constant_boolean_node (1, type);
return fold_build2 (EQ_EXPR, type, arg0, arg1);
case NE_EXPR:
/* For NE, we can only do this simplification if integer
or we don't honor IEEE floating point NaNs. */
if (FLOAT_TYPE_P (TREE_TYPE (arg0))
&& HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
break;
/* ... fall through ... */
case GT_EXPR:
case LT_EXPR:
return constant_boolean_node (0, type);
default:
gcc_unreachable ();
}
}
/* If we are comparing an expression that just has comparisons
of two integer values, arithmetic expressions of those comparisons,
and constants, we can simplify it. There are only three cases
to check: the two values can either be equal, the first can be
greater, or the second can be greater. Fold the expression for
those three values. Since each value must be 0 or 1, we have
eight possibilities, each of which corresponds to the constant 0
or 1 or one of the six possible comparisons.
This handles common cases like (a > b) == 0 but also handles
expressions like ((x > y) - (y > x)) > 0, which supposedly
occur in macroized code. */
if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
{
tree cval1 = 0, cval2 = 0;
int save_p = 0;
if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
/* Don't handle degenerate cases here; they should already
have been handled anyway. */
&& cval1 != 0 && cval2 != 0
&& ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
&& TREE_TYPE (cval1) == TREE_TYPE (cval2)
&& INTEGRAL_TYPE_P (TREE_TYPE (cval1))
&& TYPE_MAX_VALUE (TREE_TYPE (cval1))
&& TYPE_MAX_VALUE (TREE_TYPE (cval2))
&& ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
{
tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
/* We can't just pass T to eval_subst in case cval1 or cval2
was the same as ARG1. */
tree high_result
= fold_build2 (code, type,
eval_subst (arg0, cval1, maxval,
cval2, minval),
arg1);
tree equal_result
= fold_build2 (code, type,
eval_subst (arg0, cval1, maxval,
cval2, maxval),
arg1);
tree low_result
= fold_build2 (code, type,
eval_subst (arg0, cval1, minval,
cval2, maxval),
arg1);
/* All three of these results should be 0 or 1. Confirm they are.
Then use those values to select the proper code to use. */
if (TREE_CODE (high_result) == INTEGER_CST
&& TREE_CODE (equal_result) == INTEGER_CST
&& TREE_CODE (low_result) == INTEGER_CST)
{
/* Make a 3-bit mask with the high-order bit being the
value for `>', the next for '=', and the low for '<'. */
switch ((integer_onep (high_result) * 4)
+ (integer_onep (equal_result) * 2)
+ integer_onep (low_result))
{
case 0:
/* Always false. */
return omit_one_operand (type, integer_zero_node, arg0);
case 1:
code = LT_EXPR;
break;
case 2:
code = EQ_EXPR;
break;
case 3:
code = LE_EXPR;
break;
case 4:
code = GT_EXPR;
break;
case 5:
code = NE_EXPR;
break;
case 6:
code = GE_EXPR;
break;
case 7:
/* Always true. */
return omit_one_operand (type, integer_one_node, arg0);
}
if (save_p)
return save_expr (build2 (code, type, cval1, cval2));
return fold_build2 (code, type, cval1, cval2);
}
}
}
/* Fold a comparison of the address of COMPONENT_REFs with the same
type and component to a comparison of the address of the base
object. In short, &x->a OP &y->a to x OP y and
&x->a OP &y.a to x OP &y */
if (TREE_CODE (arg0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
&& TREE_CODE (arg1) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
{
tree cref0 = TREE_OPERAND (arg0, 0);
tree cref1 = TREE_OPERAND (arg1, 0);
if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
{
tree op0 = TREE_OPERAND (cref0, 0);
tree op1 = TREE_OPERAND (cref1, 0);
return fold_build2 (code, type,
build_fold_addr_expr (op0),
build_fold_addr_expr (op1));
}
}
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
into a single range test. */
if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
|| TREE_CODE (arg0) == EXACT_DIV_EXPR)
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !integer_zerop (TREE_OPERAND (arg0, 1))
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
&& !TREE_OVERFLOW (arg1))
{
tem = fold_div_compare (code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
}
return NULL_TREE;
}
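One of the transformations fold_comparison now centralizes is the rewrite of X +- C1 CMP C2 into X CMP C2 -+ C1, done only for signed operands with undefined overflow (the guard checks !TYPE_UNSIGNED and !(flag_wrapv || flag_trapv)) and only when the adjusted constant itself does not overflow. A minimal sketch of that source-level equivalence, using invented example constants and plain C rather than GCC internals:

  /* original () is the X + C1 < C2 form; folded () is the X < C2 - C1 form
     the transformation produces (here C1 = 5, C2 = 17).  */
  #include <assert.h>

  static int original (int x) { return x + 5 < 17; }
  static int folded (int x)   { return x < 12; }      /* 12 == 17 - 5 */

  int main (void)
  {
    int x;
    for (x = -100000; x <= 100000; x++)
      assert (original (x) == folded (x));
    return 0;
  }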
/* Fold a binary expression of code CODE and type TYPE with operands
OP0 and OP1. Return the folded expression if folding is
successful. Otherwise, return NULL_TREE. */
@@ -7230,10 +7670,9 @@ fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
- tree t1 = NULL_TREE;
- tree tem;
- tree arg0 = NULL_TREE, arg1 = NULL_TREE;
enum tree_code_class kind = TREE_CODE_CLASS (code);
+ tree arg0, arg1, tem;
+ tree t1 = NULL_TREE;
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 2
@@ -9055,26 +9494,15 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
case EQ_EXPR:
case NE_EXPR:
- case LT_EXPR:
- case GT_EXPR:
- case LE_EXPR:
- case GE_EXPR:
- /* If one arg is a real or integer constant, put it last. */
- if (tree_swap_operands_p (arg0, arg1, true))
- return fold_build2 (swap_tree_comparison (code), type, op1, op0);
- /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
- && (code == NE_EXPR || code == EQ_EXPR))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
- fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
- arg1));
+ tem = fold_comparison (code, type, op0, op1);
+ if (tem != NULL_TREE)
+ return tem;
/* bool_var != 0 becomes bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
&& code == NE_EXPR)
return non_lvalue (fold_convert (type, arg0));
/* bool_var == 1 becomes bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
&& code == EQ_EXPR)
@@ -9090,10 +9518,16 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
&& code == EQ_EXPR)
return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
/* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == INTEGER_CST)
return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
arg1));
/* If this is an equality comparison of the address of a non-weak
object against zero, then we know the result. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == ADDR_EXPR
+ if (TREE_CODE (arg0) == ADDR_EXPR
&& VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
&& ! DECL_WEAK (TREE_OPERAND (arg0, 0))
&& integer_zerop (arg1))
@@ -9102,8 +9536,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
/* If this is an equality comparison of the address of two non-weak,
unaliased symbols neither of which are extern (since we do not
have access to attributes for externs), then we know the result. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == ADDR_EXPR
+ if (TREE_CODE (arg0) == ADDR_EXPR
&& VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
&& ! DECL_WEAK (TREE_OPERAND (arg0, 0))
&& ! lookup_attribute ("alias",
@@ -9133,65 +9566,252 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
type);
}
- /* If this is a comparison of two exprs that look like an
- ARRAY_REF of the same object, then we can fold this to a
- comparison of the two offsets. */
- if (TREE_CODE_CLASS (code) == tcc_comparison)
- {
- tree base0, offset0, base1, offset1;
- if (extract_array_ref (arg0, &base0, &offset0)
- && extract_array_ref (arg1, &base1, &offset1)
- && operand_equal_p (base0, base1, 0))
- {
- /* Handle no offsets on both sides specially. */
- if (offset0 == NULL_TREE
- && offset1 == NULL_TREE)
- return fold_build2 (code, type, integer_zero_node,
- integer_zero_node);
- if (!offset0 || !offset1
- || TREE_TYPE (offset0) == TREE_TYPE (offset1))
- {
- if (offset0 == NULL_TREE)
- offset0 = build_int_cst (TREE_TYPE (offset1), 0);
- if (offset1 == NULL_TREE)
- offset1 = build_int_cst (TREE_TYPE (offset0), 0);
- return fold_build2 (code, type, offset0, offset1);
- }
- }
- }
- /* Transform comparisons of the form X +- C CMP X. */
- if ((code != EQ_EXPR && code != NE_EXPR)
- && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
- && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
- || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && !TYPE_UNSIGNED (TREE_TYPE (arg1))
- && !(flag_wrapv || flag_trapv))))
- {
- tree arg01 = TREE_OPERAND (arg0, 1);
- enum tree_code code0 = TREE_CODE (arg0);
- int is_positive;
- if (TREE_CODE (arg01) == REAL_CST)
- is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
- else
- is_positive = tree_int_cst_sgn (arg01);
- /* (X - c) > X becomes false. */
- if (code == GT_EXPR
- && ((code0 == MINUS_EXPR && is_positive >= 0)
- || (code0 == PLUS_EXPR && is_positive <= 0)))
- return constant_boolean_node (0, type);
- /* Likewise (X + c) < X becomes false. */
- if (code == LT_EXPR
- && ((code0 == PLUS_EXPR && is_positive >= 0)
- || (code0 == MINUS_EXPR && is_positive <= 0)))
- return constant_boolean_node (0, type);
+ /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
+ a MINUS_EXPR of a constant, we can convert it into a comparison with
+ a revised constant as long as no overflow occurs. */
+ if (TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_CODE (arg0) == PLUS_EXPR
+ || TREE_CODE (arg0) == MINUS_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
+ ? MINUS_EXPR : PLUS_EXPR,
+ arg1, TREE_OPERAND (arg0, 1), 0))
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ /* Similarly for a NEGATE_EXPR. */
+ if (TREE_CODE (arg0) == NEGATE_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && 0 != (tem = negate_expr (arg1))
+ && TREE_CODE (tem) == INTEGER_CST
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ /* If we have X - Y == 0, we can convert that to X == Y and similarly
+ for !=. Don't do this for ordered comparisons due to overflow. */
+ if (TREE_CODE (arg0) == MINUS_EXPR
+ && integer_zerop (arg1))
+ return fold_build2 (code, type,
+ TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
+ /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
+ if (TREE_CODE (arg0) == ABS_EXPR
+ && (integer_zerop (arg1) || real_zerop (arg1)))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
/* If this is an EQ or NE comparison with zero and ARG0 is
(1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
two operations, but the latter can be done in one less insn
on machines that have only two-operand insns or on which a
constant cannot be the first operand. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_zerop (arg1))
{
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
if (TREE_CODE (arg00) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg00, 0)))
return
fold_build2 (code, type,
build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
arg01, TREE_OPERAND (arg00, 1)),
fold_convert (TREE_TYPE (arg0),
integer_one_node)),
arg1);
else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
return
fold_build2 (code, type,
build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
arg00, TREE_OPERAND (arg01, 1)),
fold_convert (TREE_TYPE (arg0),
integer_one_node)),
arg1);
}
/* If this is an NE or EQ comparison of zero against the result of a
signed MOD operation whose second operand is a power of 2, make
the MOD operation unsigned since it is simpler and equivalent. */
if (integer_zerop (arg1)
&& !TYPE_UNSIGNED (TREE_TYPE (arg0))
&& (TREE_CODE (arg0) == TRUNC_MOD_EXPR
|| TREE_CODE (arg0) == CEIL_MOD_EXPR
|| TREE_CODE (arg0) == FLOOR_MOD_EXPR
|| TREE_CODE (arg0) == ROUND_MOD_EXPR)
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
fold_convert (newtype,
TREE_OPERAND (arg0, 0)),
fold_convert (newtype,
TREE_OPERAND (arg0, 1)));
return fold_build2 (code, type, newmod,
fold_convert (newtype, arg1));
}
/* If this is an NE comparison of zero with an AND of one, remove the
comparison since the AND will give the correct value. */
if (code == NE_EXPR
&& integer_zerop (arg1)
&& TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (arg0, 1)))
return fold_convert (type, arg0);
/* If we have (A & C) == C where C is a power of 2, convert this into
(A & C) != 0. Similarly for NE_EXPR. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_pow2p (TREE_OPERAND (arg0, 1))
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
arg0, fold_convert (TREE_TYPE (arg0),
integer_zero_node));
/* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
bit, then fold the expression into A < 0 or A >= 0. */
tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
if (tem)
return tem;
/* If we have (A & C) == D where D & ~C != 0, convert this into 0.
Similarly for NE_EXPR. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tree notc = fold_build1 (BIT_NOT_EXPR,
TREE_TYPE (TREE_OPERAND (arg0, 1)),
TREE_OPERAND (arg0, 1));
tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
arg1, notc);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (dandnotc))
return omit_one_operand (type, rslt, arg0);
}
/* If we have (A | C) == D where C & ~D != 0, convert this into 0.
Similarly for NE_EXPR. */
if (TREE_CODE (arg0) == BIT_IOR_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 1), notd);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (candnotd))
return omit_one_operand (type, rslt, arg0);
}
/* If this is a comparison of a field, we may be able to simplify it. */
if (((TREE_CODE (arg0) == COMPONENT_REF
&& lang_hooks.can_use_bit_fields_p ())
|| TREE_CODE (arg0) == BIT_FIELD_REF)
/* Handle the constant case even without -O
to make sure the warnings are given. */
&& (optimize || TREE_CODE (arg1) == INTEGER_CST))
{
t1 = optimize_bit_field_compare (code, type, arg0, arg1);
if (t1)
return t1;
}
/* Optimize comparisons of strlen vs zero to a compare of the
first character of the string vs zero. To wit,
strlen(ptr) == 0 => *ptr == 0
strlen(ptr) != 0 => *ptr != 0
Other cases should reduce to one of these two (or a constant)
due to the return value of strlen being unsigned. */
if (TREE_CODE (arg0) == CALL_EXPR
&& integer_zerop (arg1))
{
tree fndecl = get_callee_fndecl (arg0);
tree arglist;
if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
&& (arglist = TREE_OPERAND (arg0, 1))
&& TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
&& ! TREE_CHAIN (arglist))
{
tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
return fold_build2 (code, type, iref,
build_int_cst (TREE_TYPE (iref), 0));
}
}
/* Fold (X >> C) != 0 into X < 0 if C is one less than the width
of X. Similarly fold (X >> C) == 0 into X >= 0. */
if (TREE_CODE (arg0) == RSHIFT_EXPR
&& integer_zerop (arg1)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
tree itype = TREE_TYPE (arg00);
if (TREE_INT_CST_HIGH (arg01) == 0
&& TREE_INT_CST_LOW (arg01)
== (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
{
if (TYPE_UNSIGNED (itype))
{
itype = lang_hooks.types.signed_type (itype);
arg00 = fold_convert (itype, arg00);
}
return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
type, arg00, build_int_cst (itype, 0));
}
}
if (integer_zerop (arg1)
&& tree_expr_nonzero_p (arg0))
{
tree res = constant_boolean_node (code==NE_EXPR, type);
return omit_one_operand (type, res, arg0);
}
return NULL_TREE;
case LT_EXPR:
case GT_EXPR:
case LE_EXPR:
case GE_EXPR:
tem = fold_comparison (code, type, op0, op1);
if (tem != NULL_TREE)
return tem;
/* Transform comparisons of the form X +- C CMP X. */
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
&& !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
|| (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !TYPE_UNSIGNED (TREE_TYPE (arg1))
&& !(flag_wrapv || flag_trapv))))
{
tree arg01 = TREE_OPERAND (arg0, 1);
enum tree_code code0 = TREE_CODE (arg0);
int is_positive;
if (TREE_CODE (arg01) == REAL_CST)
is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
else
is_positive = tree_int_cst_sgn (arg01);
/* (X - c) > X becomes false. */
if (code == GT_EXPR
&& ((code0 == MINUS_EXPR && is_positive >= 0)
|| (code0 == PLUS_EXPR && is_positive <= 0)))
return constant_boolean_node (0, type);
/* Likewise (X + c) < X becomes false. */
if (code == LT_EXPR
&& ((code0 == PLUS_EXPR && is_positive >= 0)
|| (code0 == MINUS_EXPR && is_positive <= 0)))
return constant_boolean_node (0, type);
/* Convert (X - c) <= X to true. */
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
@@ -9233,194 +9853,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
}
}
/* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
&& !TYPE_UNSIGNED (TREE_TYPE (arg1))
&& !(flag_wrapv || flag_trapv))
&& (TREE_CODE (arg1) == INTEGER_CST
&& !TREE_OVERFLOW (arg1)))
{
tree const1 = TREE_OPERAND (arg0, 1);
tree const2 = arg1;
tree variable = TREE_OPERAND (arg0, 0);
tree lhs;
int lhs_add;
lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
TREE_TYPE (arg1), const2, const1);
if (TREE_CODE (lhs) == TREE_CODE (arg1)
&& (TREE_CODE (lhs) != INTEGER_CST
|| !TREE_OVERFLOW (lhs)))
return fold_build2 (code, type, variable, lhs);
}
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
tree targ1 = strip_float_extensions (arg1);
tree newtype = TREE_TYPE (targ0);
if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
newtype = TREE_TYPE (targ1);
/* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
return fold_build2 (code, type, fold_convert (newtype, targ0),
fold_convert (newtype, targ1));
/* (-a) CMP (-b) -> b CMP a */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == NEGATE_EXPR)
return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
TREE_OPERAND (arg0, 0));
if (TREE_CODE (arg1) == REAL_CST)
{
REAL_VALUE_TYPE cst;
cst = TREE_REAL_CST (arg1);
/* (-a) CMP CST -> a swap(CMP) (-CST) */
if (TREE_CODE (arg0) == NEGATE_EXPR)
return
fold_build2 (swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 0),
build_real (TREE_TYPE (arg1),
REAL_VALUE_NEGATE (cst)));
/* IEEE doesn't distinguish +0 and -0 in comparisons. */
/* a CMP (-0) -> a CMP 0 */
if (REAL_VALUE_MINUS_ZERO (cst))
return fold_build2 (code, type, arg0,
build_real (TREE_TYPE (arg1), dconst0));
/* x != NaN is always true, other ops are always false. */
if (REAL_VALUE_ISNAN (cst)
&& ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
{
tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
return omit_one_operand (type, tem, arg0);
}
/* Fold comparisons against infinity. */
if (REAL_VALUE_ISINF (cst))
{
tem = fold_inf_compare (code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
}
}
/* If this is a comparison of a real constant with a PLUS_EXPR
or a MINUS_EXPR of a real constant, we can convert it into a
comparison with a revised real constant as long as no overflow
occurs when unsafe_math_optimizations are enabled. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == REAL_CST
&& (TREE_CODE (arg0) == PLUS_EXPR
|| TREE_CODE (arg0) == MINUS_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
&& 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
? MINUS_EXPR : PLUS_EXPR,
arg1, TREE_OPERAND (arg0, 1), 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Likewise, we can simplify a comparison of a real constant with
a MINUS_EXPR whose first operand is also a real constant, i.e.
(c1 - x) < c2 becomes x > c1-c2. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == REAL_CST
&& TREE_CODE (arg0) == MINUS_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
&& 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
arg1, 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 1), tem);
/* Fold comparisons against built-in math functions. */
if (TREE_CODE (arg1) == REAL_CST
&& flag_unsafe_math_optimizations
&& ! flag_errno_math)
{
enum built_in_function fcode = builtin_mathfn_code (arg0);
if (fcode != END_BUILTINS)
{
tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
}
}
}
/* Convert foo++ == CONST into ++foo == CONST + INCR. */
if (TREE_CONSTANT (arg1)
&& (TREE_CODE (arg0) == POSTINCREMENT_EXPR
|| TREE_CODE (arg0) == POSTDECREMENT_EXPR)
/* This optimization is invalid for ordered comparisons
if CONST+INCR overflows or if foo+incr might overflow.
This optimization is invalid for floating point due to rounding.
For pointer types we assume overflow doesn't happen. */
&& (POINTER_TYPE_P (TREE_TYPE (arg0))
|| (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
&& (code == EQ_EXPR || code == NE_EXPR))))
{
tree varop, newconst;
if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
{
newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
arg1, TREE_OPERAND (arg0, 1));
varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
}
else
{
newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
arg1, TREE_OPERAND (arg0, 1));
varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
}
/* If VAROP is a reference to a bitfield, we must mask
the constant by the width of the field. */
if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
&& DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
&& host_integerp (DECL_SIZE (TREE_OPERAND
(TREE_OPERAND (varop, 0), 1)), 1))
{
tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
tree folded_compare, shift;
/* First check whether the comparison would come out
always the same. If we don't do that we would
change the meaning with the masking. */
folded_compare = fold_build2 (code, type,
TREE_OPERAND (varop, 0), arg1);
if (integer_zerop (folded_compare)
|| integer_onep (folded_compare))
return omit_one_operand (type, folded_compare, varop);
shift = build_int_cst (NULL_TREE,
TYPE_PRECISION (TREE_TYPE (varop)) - size);
shift = fold_convert (TREE_TYPE (varop), shift);
newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
newconst, shift);
newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
newconst, shift);
}
return fold_build2 (code, type, varop, newconst);
}
/* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
This transformation affects the cases which are handled in later
optimizations involving comparisons with non-negative constants. */
@@ -9428,22 +9860,19 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
&& TREE_CODE (arg0) != INTEGER_CST
&& tree_int_cst_sgn (arg1) > 0)
{
- switch (code)
+ if (code == GE_EXPR)
{
- case GE_EXPR:
arg1 = const_binop (MINUS_EXPR, arg1,
build_int_cst (TREE_TYPE (arg1), 1), 0);
return fold_build2 (GT_EXPR, type, arg0,
fold_convert (TREE_TYPE (arg0), arg1));
+ }
- case LT_EXPR:
+ if (code == LT_EXPR)
+ {
arg1 = const_binop (MINUS_EXPR, arg1,
build_int_cst (TREE_TYPE (arg1), 1), 0);
return fold_build2 (LE_EXPR, type, arg0,
fold_convert (TREE_TYPE (arg0), arg1));
- default:
- break;
}
}
@@ -9596,78 +10025,18 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
}
}
/* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
a MINUS_EXPR of a constant, we can convert it into a comparison with
a revised constant as long as no overflow occurs. */
if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg1) == INTEGER_CST
&& (TREE_CODE (arg0) == PLUS_EXPR
|| TREE_CODE (arg0) == MINUS_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
? MINUS_EXPR : PLUS_EXPR,
arg1, TREE_OPERAND (arg0, 1), 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Similarly for a NEGATE_EXPR. */
else if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = negate_expr (arg1))
&& TREE_CODE (tem) == INTEGER_CST
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* If we have X - Y == 0, we can convert that to X == Y and similarly
for !=. Don't do this for ordered comparisons due to overflow. */
else if ((code == NE_EXPR || code == EQ_EXPR)
&& integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
return fold_build2 (code, type,
TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
&& (TREE_CODE (arg0) == NOP_EXPR
|| TREE_CODE (arg0) == CONVERT_EXPR))
{
/* If we are widening one operand of an integer comparison,
see if the other operand is similarly being widened. Perhaps we
can do the comparison in the narrower type. */
tem = fold_widened_comparison (code, type, arg0, arg1);
if (tem)
return tem;
/* Or if we are changing signedness. */
tem = fold_sign_changed_comparison (code, type, arg0, arg1);
if (tem)
return tem;
}
/* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
constant, we can simplify it. */
else if (TREE_CODE (arg1) == INTEGER_CST
&& (TREE_CODE (arg0) == MIN_EXPR
|| TREE_CODE (arg0) == MAX_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tem = optimize_minmax_comparison (code, type, op0, op1);
if (tem)
return tem;
return NULL_TREE;
}
/* If we are comparing an ABS_EXPR with a constant, we can
convert all the cases into explicit comparisons, but they may
well not be faster than doing the ABS and one comparison.
But ABS (X) <= C is a range comparison, which becomes a subtraction
and a comparison, and is probably faster. */
- else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (arg0) == ABS_EXPR
- && ! TREE_SIDE_EFFECTS (arg0)
- && (0 != (tem = negate_expr (arg1)))
- && TREE_CODE (tem) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ if (code == LE_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (arg0) == ABS_EXPR
+ && ! TREE_SIDE_EFFECTS (arg0)
+ && (0 != (tem = negate_expr (arg1)))
+ && TREE_CODE (tem) == INTEGER_CST
+ && ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (TRUTH_ANDIF_EXPR, type,
build2 (GE_EXPR, type,
TREE_OPERAND (arg0, 0), tem),
@@ -9675,135 +10044,19 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
TREE_OPERAND (arg0, 0), arg1));
/* Convert ABS_EXPR<x> >= 0 to true. */
- else if (code == GE_EXPR
+ if (code == GE_EXPR
&& tree_expr_nonnegative_p (arg0)
&& (integer_zerop (arg1)
|| (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
&& real_zerop (arg1))))
return omit_one_operand (type, integer_one_node, arg0);
/* Convert ABS_EXPR<x> < 0 to false. */
- else if (code == LT_EXPR
+ if (code == LT_EXPR
&& tree_expr_nonnegative_p (arg0)
&& (integer_zerop (arg1) || real_zerop (arg1)))
return omit_one_operand (type, integer_zero_node, arg0);
/* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
else if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == ABS_EXPR
&& (integer_zerop (arg1) || real_zerop (arg1)))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
/* If this is an EQ or NE comparison with zero and ARG0 is
(1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
two operations, but the latter can be done in one less insn
on machines that have only two-operand insns or on which a
constant cannot be the first operand. */
if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == BIT_AND_EXPR)
{
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
if (TREE_CODE (arg00) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg00, 0)))
return
fold_build2 (code, type,
build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
arg01, TREE_OPERAND (arg00, 1)),
fold_convert (TREE_TYPE (arg0),
integer_one_node)),
arg1);
else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
return
fold_build2 (code, type,
build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
arg00, TREE_OPERAND (arg01, 1)),
fold_convert (TREE_TYPE (arg0),
integer_one_node)),
arg1);
}
/* If this is an NE or EQ comparison of zero against the result of a
signed MOD operation whose second operand is a power of 2, make
the MOD operation unsigned since it is simpler and equivalent. */
if ((code == NE_EXPR || code == EQ_EXPR)
&& integer_zerop (arg1)
&& !TYPE_UNSIGNED (TREE_TYPE (arg0))
&& (TREE_CODE (arg0) == TRUNC_MOD_EXPR
|| TREE_CODE (arg0) == CEIL_MOD_EXPR
|| TREE_CODE (arg0) == FLOOR_MOD_EXPR
|| TREE_CODE (arg0) == ROUND_MOD_EXPR)
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
fold_convert (newtype,
TREE_OPERAND (arg0, 0)),
fold_convert (newtype,
TREE_OPERAND (arg0, 1)));
return fold_build2 (code, type, newmod,
fold_convert (newtype, arg1));
}
/* If this is an NE comparison of zero with an AND of one, remove the
comparison since the AND will give the correct value. */
if (code == NE_EXPR && integer_zerop (arg1)
&& TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (arg0, 1)))
return fold_convert (type, arg0);
/* If we have (A & C) == C where C is a power of 2, convert this into
(A & C) != 0. Similarly for NE_EXPR. */
if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_pow2p (TREE_OPERAND (arg0, 1))
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
arg0, fold_convert (TREE_TYPE (arg0),
integer_zero_node));
/* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
bit, then fold the expression into A < 0 or A >= 0. */
tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
if (tem)
return tem;
/* If we have (A & C) == D where D & ~C != 0, convert this into 0.
Similarly for NE_EXPR. */
if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == BIT_AND_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tree notc = fold_build1 (BIT_NOT_EXPR,
TREE_TYPE (TREE_OPERAND (arg0, 1)),
TREE_OPERAND (arg0, 1));
tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
arg1, notc);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (dandnotc))
return omit_one_operand (type, rslt, arg0);
}
/* If we have (A | C) == D where C & ~D != 0, convert this into 0.
Similarly for NE_EXPR. */
if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == BIT_IOR_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 1), notd);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (candnotd))
return omit_one_operand (type, rslt, arg0);
}
/* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
and similarly for >= into !=. */
if ((code == LT_EXPR || code == GE_EXPR)
@@ -9815,12 +10068,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
TREE_OPERAND (arg1, 1)),
build_int_cst (TREE_TYPE (arg0), 0));
- else if ((code == LT_EXPR || code == GE_EXPR)
+ if ((code == LT_EXPR || code == GE_EXPR)
&& TYPE_UNSIGNED (TREE_TYPE (arg0))
&& (TREE_CODE (arg1) == NOP_EXPR
|| TREE_CODE (arg1) == CONVERT_EXPR)
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
return
build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
fold_convert (TREE_TYPE (arg0),
@@ -9829,253 +10082,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
1))),
build_int_cst (TREE_TYPE (arg0), 0));
+ return NULL_TREE;
/* Simplify comparison of something with itself. (For IEEE
floating-point, we can only do some of these simplifications.) */
if (operand_equal_p (arg0, arg1, 0))
{
switch (code)
{
case EQ_EXPR:
if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
|| ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return constant_boolean_node (1, type);
break;
case GE_EXPR:
case LE_EXPR:
if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
|| ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return constant_boolean_node (1, type);
return fold_build2 (EQ_EXPR, type, arg0, arg1);
case NE_EXPR:
/* For NE, we can only do this simplification if integer
or we don't honor IEEE floating point NaNs. */
if (FLOAT_TYPE_P (TREE_TYPE (arg0))
&& HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
break;
/* ... fall through ... */
case GT_EXPR:
case LT_EXPR:
return constant_boolean_node (0, type);
default:
gcc_unreachable ();
}
}
/* If we are comparing an expression that just has comparisons
of two integer values, arithmetic expressions of those comparisons,
and constants, we can simplify it. There are only three cases
to check: the two values can either be equal, the first can be
greater, or the second can be greater. Fold the expression for
those three values. Since each value must be 0 or 1, we have
eight possibilities, each of which corresponds to the constant 0
or 1 or one of the six possible comparisons.
This handles common cases like (a > b) == 0 but also handles
expressions like ((x > y) - (y > x)) > 0, which supposedly
occur in macroized code. */
if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
{
tree cval1 = 0, cval2 = 0;
int save_p = 0;
if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
/* Don't handle degenerate cases here; they should already
have been handled anyway. */
&& cval1 != 0 && cval2 != 0
&& ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
&& TREE_TYPE (cval1) == TREE_TYPE (cval2)
&& INTEGRAL_TYPE_P (TREE_TYPE (cval1))
&& TYPE_MAX_VALUE (TREE_TYPE (cval1))
&& TYPE_MAX_VALUE (TREE_TYPE (cval2))
&& ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
{
tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
/* We can't just pass T to eval_subst in case cval1 or cval2
was the same as ARG1. */
tree high_result
= fold_build2 (code, type,
eval_subst (arg0, cval1, maxval,
cval2, minval),
arg1);
tree equal_result
= fold_build2 (code, type,
eval_subst (arg0, cval1, maxval,
cval2, maxval),
arg1);
tree low_result
= fold_build2 (code, type,
eval_subst (arg0, cval1, minval,
cval2, maxval),
arg1);
/* All three of these results should be 0 or 1. Confirm they
are. Then use those values to select the proper code
to use. */
if ((integer_zerop (high_result)
|| integer_onep (high_result))
&& (integer_zerop (equal_result)
|| integer_onep (equal_result))
&& (integer_zerop (low_result)
|| integer_onep (low_result)))
{
/* Make a 3-bit mask with the high-order bit being the
value for `>', the next for '=', and the low for '<'. */
switch ((integer_onep (high_result) * 4)
+ (integer_onep (equal_result) * 2)
+ integer_onep (low_result))
{
case 0:
/* Always false. */
return omit_one_operand (type, integer_zero_node, arg0);
case 1:
code = LT_EXPR;
break;
case 2:
code = EQ_EXPR;
break;
case 3:
code = LE_EXPR;
break;
case 4:
code = GT_EXPR;
break;
case 5:
code = NE_EXPR;
break;
case 6:
code = GE_EXPR;
break;
case 7:
/* Always true. */
return omit_one_operand (type, integer_one_node, arg0);
}
if (save_p)
return save_expr (build2 (code, type, cval1, cval2));
else
return fold_build2 (code, type, cval1, cval2);
}
}
}
/* If this is a comparison of a field, we may be able to simplify it. */
if (((TREE_CODE (arg0) == COMPONENT_REF
&& lang_hooks.can_use_bit_fields_p ())
|| TREE_CODE (arg0) == BIT_FIELD_REF)
&& (code == EQ_EXPR || code == NE_EXPR)
/* Handle the constant case even without -O
to make sure the warnings are given. */
&& (optimize || TREE_CODE (arg1) == INTEGER_CST))
{
t1 = optimize_bit_field_compare (code, type, arg0, arg1);
if (t1)
return t1;
}
/* Fold a comparison of the address of COMPONENT_REFs with the same
type and component to a comparison of the address of the base
object. In short, &x->a OP &y->a to x OP y and
&x->a OP &y.a to x OP &y */
if (TREE_CODE (arg0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
&& TREE_CODE (arg1) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
{
tree cref0 = TREE_OPERAND (arg0, 0);
tree cref1 = TREE_OPERAND (arg1, 0);
if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
{
tree op0 = TREE_OPERAND (cref0, 0);
tree op1 = TREE_OPERAND (cref1, 0);
return fold_build2 (code, type,
build_fold_addr_expr (op0),
build_fold_addr_expr (op1));
}
}
/* Optimize comparisons of strlen vs zero to a compare of the
first character of the string vs zero. To wit,
strlen(ptr) == 0 => *ptr == 0
strlen(ptr) != 0 => *ptr != 0
Other cases should reduce to one of these two (or a constant)
due to the return value of strlen being unsigned. */
if ((code == EQ_EXPR || code == NE_EXPR)
&& integer_zerop (arg1)
&& TREE_CODE (arg0) == CALL_EXPR)
{
tree fndecl = get_callee_fndecl (arg0);
tree arglist;
if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
&& (arglist = TREE_OPERAND (arg0, 1))
&& TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
&& ! TREE_CHAIN (arglist))
{
tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
return fold_build2 (code, type, iref,
build_int_cst (TREE_TYPE (iref), 0));
}
}
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
into a single range test. */
if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
|| TREE_CODE (arg0) == EXACT_DIV_EXPR)
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !integer_zerop (TREE_OPERAND (arg0, 1))
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
&& !TREE_OVERFLOW (arg1))
{
t1 = fold_div_compare (code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
}
/* Fold (X >> C) != 0 into X < 0 if C is one less than the width
of X. Similarly fold (X >> C) == 0 into X >= 0. */
if ((code == EQ_EXPR || code == NE_EXPR)
&& integer_zerop (arg1)
&& TREE_CODE (arg0) == RSHIFT_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
tree itype = TREE_TYPE (arg00);
if (TREE_INT_CST_HIGH (arg01) == 0
&& TREE_INT_CST_LOW (arg01)
== (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
{
if (TYPE_UNSIGNED (itype))
{
itype = lang_hooks.types.signed_type (itype);
arg00 = fold_convert (itype, arg00);
}
return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
type, arg00, build_int_cst (itype, 0));
}
}
if ((code == EQ_EXPR || code == NE_EXPR)
&& integer_zerop (arg1)
&& tree_expr_nonzero_p (arg0))
{
tree res = constant_boolean_node (code==NE_EXPR, type);
return omit_one_operand (type, res, arg0);
}
t1 = fold_relational_const (code, type, arg0, arg1);
return t1 == NULL_TREE ? NULL_TREE : t1;
case UNORDERED_EXPR:
case ORDERED_EXPR:
...