Commit ac5a28a6 authored by Jan Hubicka, committed by Jan Hubicka

fold-const.c (fold_unary): Convert (T1)(X op Y) into ((T1)X op (T1)Y)...

	* fold-const.c (fold_unary): Convert (T1)(X op Y) into ((T1)X op (T1)Y),
	for pointer type in more cases than before.

	* gimplify.c (gimplify_expr): Fold (void *)&a + 4.

	* tree-object-size.c (plus_expr_object_size): When operand size is
	unknown, return unknown.

From-SVN: r124403
parent 2c405565
2007-05-03 Jan Hubicka <jh@suse.cz>
* fold-const.c (fold_unary): Convert (T1)(X op Y) into ((T1)X op (T1)Y),
for pointer type in more cases than before.
* gimplify.c (gimplify_expr): Fold (void *)&a + 4.
* tree-object-size.c (plus_expr_object_size): When operand size is
unknown, return unknown.
2007-05-03  Dirk Mueller  <dmueller@suse.de>

	* doc/invoke.texi (-m386,-m486,-mpentium,-mpentiumpro): Remove.
......
@@ -7814,24 +7814,20 @@ fold_unary (enum tree_code code, tree type, tree op0)
 	    }
 	}
 
-      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
-	 T2 being pointers to types of the same size.  */
-      if (POINTER_TYPE_P (type)
+      /* Convert (T1)(X op Y) into ((T1)X op (T1)Y), for pointer type,
+	 when one of the new casts will fold away.  Conservatively we assume
+	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
+      if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (arg0))
 	  && BINARY_CLASS_P (arg0)
-	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
-	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
+	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
+	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
 	{
 	  tree arg00 = TREE_OPERAND (arg0, 0);
-	  tree t0 = type;
-	  tree t1 = TREE_TYPE (arg00);
-	  tree tt0 = TREE_TYPE (t0);
-	  tree tt1 = TREE_TYPE (t1);
-	  tree s0 = TYPE_SIZE (tt0);
-	  tree s1 = TYPE_SIZE (tt1);
-	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
-	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
-			   TREE_OPERAND (arg0, 1));
+	  tree arg01 = TREE_OPERAND (arg0, 1);
+
+	  return fold_build2 (TREE_CODE (arg0), type,
+			      fold_convert (type, arg00),
+			      fold_convert (type, arg01));
 	}
 
       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
......
@@ -5883,6 +5883,21 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
 					     TREE_TYPE (*expr_p));
 	      break;
 	    }
+	  /* Convert (void *)&a + 4 into (void *)&a[1].  */
+	  if (POINTER_TYPE_P (TREE_TYPE (*expr_p))
+	      && TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
+	      && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+	      && (tmp = maybe_fold_offset_to_reference
+			(TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
+			 TREE_OPERAND (*expr_p, 1),
+			 TREE_TYPE (TREE_TYPE
+				    (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
+						   0))))))
+	    {
+	      tmp = build_fold_addr_expr (tmp);
+	      *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
+	      break;
+	    }
 	  /* FALLTHRU */
 	default:
 	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
......
@@ -588,7 +588,9 @@ plus_expr_object_size (struct object_size_info *osi, tree var, tree value)
       unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
 
       bytes = compute_builtin_object_size (op0, object_size_type);
-      if (off > offset_limit)
+      if (bytes == unknown[object_size_type])
+	;
+      else if (off > offset_limit)
 	bytes = unknown[object_size_type];
       else if (off > bytes)
 	bytes = 0;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.