Commit a5e0cd1d by Marc Glisse, committed by Marc Glisse

stor-layout.c (element_precision): New function.

2013-05-10  Marc Glisse  <marc.glisse@inria.fr>

gcc/
	* stor-layout.c (element_precision): New function.
	* machmode.h (element_precision): Declare it.
	* tree.c (build_minus_one_cst): New function.
	(element_precision): Likewise.
	* tree.h (build_minus_one_cst): Declare new function.
	(element_precision): Likewise.
	* fold-const.c (operand_equal_p): Use element_precision.
	(fold_binary_loc): Handle vector types.
	* convert.c (convert_to_integer): Use element_precision.
	* gimple.c (iterative_hash_canonical_type): Handle complex and vectors
	separately.

gcc/c-family/
	* c-common.c (vector_types_convertible_p): No TYPE_PRECISION for
	vectors.

gcc/testsuite/
	* gcc.dg/vector-shift.c: New testcase.

From-SVN: r198772
parent cb2558bc
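
For context, a minimal illustration (not part of the commit) of the kind of vector code the reworked shift folding in fold_binary_loc can now simplify. The first function mirrors the new gcc.dg/vector-shift.c testcase below; the second relies on the new prec/build_zero_cst path for out-of-range shift counts, so its expected result is an assumption about how the existing scalar fold carries over to vectors, not a statement taken from the commit.

typedef unsigned vec __attribute__ ((vector_size (4 * sizeof (int))));

vec
combine_shifts (vec x)
{
  /* With element_precision the two counts are summed against 32 (the
     element precision), not the 128-bit vector width, so this is
     expected to fold to x << 7.  */
  return (x << 4) << 3;
}

vec
oversized_shift (vec x)
{
  /* 30 + 3 >= 32, the element precision, so for a left shift the whole
     expression is expected to fold to a zero vector (assumption).  */
  return (x << 30) << 3;
}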
2013-05-10 Marc Glisse <marc.glisse@inria.fr>
* stor-layout.c (element_precision): New function.
* machmode.h (element_precision): Declare it.
* tree.c (build_minus_one_cst): New function.
(element_precision): Likewise.
* tree.h (build_minus_one_cst): Declare new function.
(element_precision): Likewise.
* fold-const.c (operand_equal_p): Use element_precision.
(fold_binary_loc): Handle vector types.
* convert.c (convert_to_integer): Use element_precision.
* gimple.c (iterative_hash_canonical_type): Handle complex and vectors
separately.
2013-05-10 Richard Sandiford <rdsandiford@googlemail.com>
* config/mips/mips-protos.h (m16_uimm3_b, m16_simm4_1, m16_nsimm4_1)
......
2013-05-10 Marc Glisse <marc.glisse@inria.fr>
* c-common.c (vector_types_convertible_p): No TYPE_PRECISION for
vectors.
2013-05-07 Han Shen <shenhan@google.com>
* c-cppbuiltin.c (c_cpp_builtins): Added "__SSP_STRONG__=3".
......
......@@ -2227,7 +2227,7 @@ vector_types_convertible_p (const_tree t1, const_tree t2, bool emit_lax_note)
convertible_lax =
(tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2))
&& (TREE_CODE (TREE_TYPE (t1)) != REAL_TYPE ||
TYPE_PRECISION (t1) == TYPE_PRECISION (t2))
TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2))
&& (INTEGRAL_TYPE_P (TREE_TYPE (t1))
== INTEGRAL_TYPE_P (TREE_TYPE (t2))));
......
......@@ -355,8 +355,8 @@ convert_to_integer (tree type, tree expr)
{
enum tree_code ex_form = TREE_CODE (expr);
tree intype = TREE_TYPE (expr);
unsigned int inprec = TYPE_PRECISION (intype);
unsigned int outprec = TYPE_PRECISION (type);
unsigned int inprec = element_precision (intype);
unsigned int outprec = element_precision (type);
/* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
be. Consider `enum E = { a, b = (enum E) 3 };'. */
......
......@@ -2445,7 +2445,8 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
/* If both types don't have the same precision, then it is not safe
to strip NOPs. */
if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
if (element_precision (TREE_TYPE (arg0))
!= element_precision (TREE_TYPE (arg1)))
return 0;
STRIP_NOPS (arg0);
......@@ -9877,6 +9878,7 @@ fold_binary_loc (location_t loc,
tree arg0, arg1, tem;
tree t1 = NULL_TREE;
bool strict_overflow_p;
unsigned int prec;
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 2
......@@ -10147,7 +10149,7 @@ fold_binary_loc (location_t loc,
STRIP_NOPS (tem);
if (operand_equal_p (tem, arg1, 0))
{
t1 = build_int_cst_type (type, -1);
t1 = build_minus_one_cst (type);
return omit_one_operand_loc (loc, type, t1, arg1);
}
}
......@@ -10161,7 +10163,7 @@ fold_binary_loc (location_t loc,
STRIP_NOPS (tem);
if (operand_equal_p (arg0, tem, 0))
{
t1 = build_int_cst_type (type, -1);
t1 = build_minus_one_cst (type);
return omit_one_operand_loc (loc, type, t1, arg0);
}
}
......@@ -10387,7 +10389,8 @@ fold_binary_loc (location_t loc,
TYPE_UNSIGNED (rtype))
/* Only create rotates in complete modes. Other cases are not
expanded properly. */
&& TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
&& (element_precision (rtype)
== element_precision (TYPE_MODE (rtype))))
{
tree tree01, tree11;
enum tree_code code01, code11;
......@@ -10403,7 +10406,7 @@ fold_binary_loc (location_t loc,
&& TREE_INT_CST_HIGH (tree01) == 0
&& TREE_INT_CST_HIGH (tree11) == 0
&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
== element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
{
tem = build2_loc (loc, LROTATE_EXPR,
TREE_TYPE (TREE_OPERAND (arg0, 0)),
......@@ -10420,7 +10423,7 @@ fold_binary_loc (location_t loc,
STRIP_NOPS (tree111);
if (TREE_CODE (tree110) == INTEGER_CST
&& 0 == compare_tree_int (tree110,
TYPE_PRECISION
element_precision
(TREE_TYPE (TREE_OPERAND
(arg0, 0))))
&& operand_equal_p (tree01, tree111, 0))
......@@ -10441,7 +10444,7 @@ fold_binary_loc (location_t loc,
STRIP_NOPS (tree011);
if (TREE_CODE (tree010) == INTEGER_CST
&& 0 == compare_tree_int (tree010,
TYPE_PRECISION
element_precision
(TREE_TYPE (TREE_OPERAND
(arg0, 0))))
&& operand_equal_p (tree11, tree011, 0))
......@@ -11757,8 +11760,7 @@ fold_binary_loc (location_t loc,
if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
&& TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
{
unsigned int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
&& (~TREE_INT_CST_LOW (arg1)
......@@ -11826,7 +11828,7 @@ fold_binary_loc (location_t loc,
&& TYPE_PRECISION (TREE_TYPE (arg0))
== GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
{
unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
prec = TYPE_PRECISION (TREE_TYPE (arg0));
tree arg00 = TREE_OPERAND (arg0, 0);
/* See if more bits can be proven as zero because of
zero extension. */
......@@ -11869,8 +11871,6 @@ fold_binary_loc (location_t loc,
newmask = mask | zerobits;
if (newmask != mask && (newmask & (newmask + 1)) == 0)
{
unsigned int prec;
/* Only do the transformation if NEWMASK is some integer
mode's mask. */
for (prec = BITS_PER_UNIT;
......@@ -12414,30 +12414,32 @@ fold_binary_loc (location_t loc,
if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
return NULL_TREE;
prec = element_precision (type);
/* Turn (a OP c1) OP c2 into a OP (c1+c2). */
if (TREE_CODE (op0) == code && host_integerp (arg1, false)
&& TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
&& TREE_INT_CST_LOW (arg1) < prec
&& host_integerp (TREE_OPERAND (arg0, 1), false)
&& TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
&& TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
{
HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
+ TREE_INT_CST_LOW (arg1));
/* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
being well defined. */
if (low >= TYPE_PRECISION (type))
if (low >= prec)
{
if (code == LROTATE_EXPR || code == RROTATE_EXPR)
low = low % TYPE_PRECISION (type);
low = low % prec;
else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
return omit_one_operand_loc (loc, type, build_zero_cst (type),
TREE_OPERAND (arg0, 0));
else
low = TYPE_PRECISION (type) - 1;
low = prec - 1;
}
return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
build_int_cst (type, low));
build_int_cst (TREE_TYPE (arg1), low));
}
/* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
......@@ -12446,9 +12448,9 @@ fold_binary_loc (location_t loc,
|| (TYPE_UNSIGNED (type)
&& code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
&& host_integerp (arg1, false)
&& TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
&& TREE_INT_CST_LOW (arg1) < prec
&& host_integerp (TREE_OPERAND (arg0, 1), false)
&& TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
&& TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
{
HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
......@@ -12459,8 +12461,8 @@ fold_binary_loc (location_t loc,
{
arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
lshift = build_int_cst (type, -1);
lshift = int_const_binop (code, lshift, arg1);
lshift = build_minus_one_cst (type);
lshift = const_binop (code, lshift, arg1);
return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
}
......@@ -12470,8 +12472,7 @@ fold_binary_loc (location_t loc,
RROTATE_EXPR by a new constant. */
if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
{
tree tem = build_int_cst (TREE_TYPE (arg1),
TYPE_PRECISION (type));
tree tem = build_int_cst (TREE_TYPE (arg1), prec);
tem = const_binop (MINUS_EXPR, tem, arg1);
return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
}
......@@ -12499,7 +12500,7 @@ fold_binary_loc (location_t loc,
&& TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
&& ((TREE_INT_CST_LOW (arg1)
+ TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
== (unsigned int) TYPE_PRECISION (type)))
== prec))
return TREE_OPERAND (arg0, 0);
/* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
......@@ -12912,8 +12913,8 @@ fold_binary_loc (location_t loc,
&& integer_zerop (arg1))
{
tree itype = TREE_TYPE (arg0);
unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
prec = TYPE_PRECISION (itype);
/* Check for a valid shift count. */
if (TREE_INT_CST_HIGH (arg001) == 0
......
......@@ -3083,8 +3083,6 @@ iterative_hash_canonical_type (tree type, hashval_t val)
if (INTEGRAL_TYPE_P (type)
|| SCALAR_FLOAT_TYPE_P (type)
|| FIXED_POINT_TYPE_P (type)
|| TREE_CODE (type) == VECTOR_TYPE
|| TREE_CODE (type) == COMPLEX_TYPE
|| TREE_CODE (type) == OFFSET_TYPE
|| POINTER_TYPE_P (type))
{
......@@ -3092,6 +3090,15 @@ iterative_hash_canonical_type (tree type, hashval_t val)
v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
}
if (VECTOR_TYPE_P (type))
{
v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
}
if (TREE_CODE (type) == COMPLEX_TYPE)
v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
/* For pointer and reference types, fold in information about the type
pointed to but do not recurse to the pointed-to type. */
if (POINTER_TYPE_P (type))
......
......@@ -297,6 +297,10 @@ extern unsigned get_mode_alignment (enum machine_mode);
#define GET_MODE_ALIGNMENT(MODE) get_mode_alignment (MODE)
/* Get the precision of the mode or its inner mode if it has one. */
extern unsigned int element_precision (enum machine_mode);
/* For each class, get the narrowest mode in that class. */
extern const unsigned char class_narrowest_mode[MAX_MODE_CLASS];
......
......@@ -453,6 +453,18 @@ get_mode_alignment (enum machine_mode mode)
return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
}
/* Return the precision of the mode, or for a complex or vector mode the
precision of the mode of its elements. */
unsigned int
element_precision (enum machine_mode mode)
{
if (COMPLEX_MODE_P (mode) || VECTOR_MODE_P (mode))
mode = GET_MODE_INNER (mode);
return GET_MODE_PRECISION (mode);
}
/* Return the natural mode of an array, given that it is SIZE bytes in
total and has elements of type ELEM_TYPE. */
......
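
A hypothetical usage sketch of the new mode-level helper (the caller below is illustrative only and does not appear in the commit): a check written against a vector mode now compares a shift count with the per-element precision rather than with the precision of the whole mode.

/* Hypothetical caller, for illustration only.  */
static bool
example_shift_count_ok_p (enum machine_mode mode, unsigned HOST_WIDE_INT count)
{
  /* For V4SImode this compares COUNT against 32 (the SImode precision),
     whereas GET_MODE_PRECISION (V4SImode) would give the full 128.  */
  return count < element_precision (mode);
}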
2013-05-10 Marc Glisse <marc.glisse@inria.fr>
* gcc.dg/vector-shift.c: New testcase.
2013-05-10 Jakub Jelinek <jakub@redhat.com>
* gcc.target/i386/rotate-1.c: Accept rolb or rorb instruction.
......
/* { dg-do compile } */
/* { dg-options "-fdump-tree-original" } */
typedef unsigned vec __attribute__ ((vector_size (4 * sizeof (int))));
void
f (vec *x)
{
*x = (*x << 4) << 3;
}
/* { dg-final { scan-tree-dump "<< 7" "original" } } */
/* { dg-final { cleanup-tree-dump "original" } } */
......@@ -1643,6 +1643,45 @@ build_one_cst (tree type)
}
}
/* Return a constant of arithmetic type TYPE which is the
opposite of the multiplicative identity of the set TYPE. */
tree
build_minus_one_cst (tree type)
{
switch (TREE_CODE (type))
{
case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
case OFFSET_TYPE:
return build_int_cst (type, -1);
case REAL_TYPE:
return build_real (type, dconstm1);
case FIXED_POINT_TYPE:
/* We can only generate 1 for accum types. */
gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
return build_fixed (type, fixed_from_double_int (double_int_minus_one,
TYPE_MODE (type)));
case VECTOR_TYPE:
{
tree scalar = build_minus_one_cst (TREE_TYPE (type));
return build_vector_from_val (type, scalar);
}
case COMPLEX_TYPE:
return build_complex (type,
build_minus_one_cst (TREE_TYPE (type)),
build_zero_cst (TREE_TYPE (type)));
default:
gcc_unreachable ();
}
}
/* Build 0 constant of type TYPE. This is used by constructor folding
and thus the constant should be represented in memory by
zero(es). */
......@@ -6949,6 +6988,19 @@ valid_constant_size_p (const_tree size)
return true;
}
/* Return the precision of the type, or for a complex or vector type the
precision of the type of its elements. */
unsigned int
element_precision (const_tree type)
{
enum tree_code code = TREE_CODE (type);
if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
type = TREE_TYPE (type);
return TYPE_PRECISION (type);
}
/* Return true if CODE represents an associative tree code. Otherwise
return false. */
bool
......
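
Similarly, a hypothetical sketch (not from the commit) of how the new tree-level helpers combine. It mirrors the fold-const.c hunk above, which builds -1 << c with build_minus_one_cst and const_binop so that the same folding code works for scalar integer types and integer vector types alike.

/* Hypothetical helper, illustration only: build the constant -1 << COUNT
   for TYPE, where TYPE may be a scalar integer type or an integer vector
   type and COUNT is assumed to be a small INTEGER_CST.  */
static tree
example_build_lshift_mask (tree type, tree count)
{
  gcc_assert (TREE_INT_CST_LOW (count) < element_precision (type));
  return const_binop (LSHIFT_EXPR, build_minus_one_cst (type), count);
}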
......@@ -4767,6 +4767,7 @@ extern tree build_constructor_va (tree, int, ...);
extern tree build_real_from_int_cst (tree, const_tree);
extern tree build_complex (tree, tree, tree);
extern tree build_one_cst (tree);
extern tree build_minus_one_cst (tree);
extern tree build_zero_cst (tree);
extern tree build_string (int, const char *);
extern tree build_tree_list_stat (tree, tree MEM_STAT_DECL);
......@@ -4867,6 +4868,7 @@ extern bool may_negate_without_overflow_p (const_tree);
extern tree strip_array_types (tree);
extern tree excess_precision_type (tree);
extern bool valid_constant_size_p (const_tree);
extern unsigned int element_precision (const_tree);
/* Construct various nodes representing fract or accum data types. */
......