Commit 08940f33, authored and committed by Richard Biener

tree-vect-data-refs.c (vect_analyze_group_access): Properly handle negative step.

2013-04-18  Richard Biener  <rguenther@suse.de>

	* tree-vect-data-refs.c (vect_analyze_group_access): Properly
	handle negative step.  Remove redundant checks.
	(vect_create_data_ref_ptr): Avoid ICEs with non-constant steps.
	* tree-vect-stmts.c (vectorizable_load): Instead of asserting
	for negative step and grouped loads fail to vectorize.

From-SVN: r198054
parent 0e0f87d4
2013-04-18 Richard Biener <rguenther@suse.de>
* tree-vect-data-refs.c (vect_analyze_group_access): Properly
handle negative step. Remove redundant checks.
(vect_create_data_ref_ptr): Avoid ICEs with non-constant steps.
* tree-vect-stmts.c (vectorizable_load): Instead of asserting
for negative step and grouped loads fail to vectorize.
2013-04-18 Steven Bosscher <steven@gcc.gnu.org>
* emit-rtl.c (reset_insn_used_flags): New function.
......
...@@ -2024,7 +2024,7 @@ vect_analyze_group_access (struct data_reference *dr) ...@@ -2024,7 +2024,7 @@ vect_analyze_group_access (struct data_reference *dr)
/* For interleaving, GROUPSIZE is STEP counted in elements, i.e., the /* For interleaving, GROUPSIZE is STEP counted in elements, i.e., the
size of the interleaving group (including gaps). */ size of the interleaving group (including gaps). */
groupsize = dr_step / type_size; groupsize = absu_hwi (dr_step) / type_size;
/* Not consecutive access is possible only if it is a part of interleaving. */ /* Not consecutive access is possible only if it is a part of interleaving. */
if (!GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt))) if (!GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
...@@ -2094,10 +2094,10 @@ vect_analyze_group_access (struct data_reference *dr) ...@@ -2094,10 +2094,10 @@ vect_analyze_group_access (struct data_reference *dr)
gimple next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt)); gimple next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
struct data_reference *data_ref = dr; struct data_reference *data_ref = dr;
unsigned int count = 1; unsigned int count = 1;
tree next_step;
tree prev_init = DR_INIT (data_ref); tree prev_init = DR_INIT (data_ref);
gimple prev = stmt; gimple prev = stmt;
HOST_WIDE_INT diff, count_in_bytes, gaps = 0; HOST_WIDE_INT diff, gaps = 0;
unsigned HOST_WIDE_INT count_in_bytes;
while (next) while (next)
{ {
...@@ -2126,18 +2126,11 @@ vect_analyze_group_access (struct data_reference *dr) ...@@ -2126,18 +2126,11 @@ vect_analyze_group_access (struct data_reference *dr)
} }
prev = next; prev = next;
data_ref = STMT_VINFO_DATA_REF (vinfo_for_stmt (next));
/* Check that all the accesses have the same STEP. */ /* All group members have the same STEP by construction. */
next_step = DR_STEP (STMT_VINFO_DATA_REF (vinfo_for_stmt (next))); gcc_checking_assert (operand_equal_p (DR_STEP (data_ref), step, 0));
if (tree_int_cst_compare (step, next_step))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"not consecutive access in interleaving");
return false;
}
data_ref = STMT_VINFO_DATA_REF (vinfo_for_stmt (next));
/* Check that the distance between two accesses is equal to the type /* Check that the distance between two accesses is equal to the type
size. Otherwise, we have gaps. */ size. Otherwise, we have gaps. */
diff = (TREE_INT_CST_LOW (DR_INIT (data_ref)) diff = (TREE_INT_CST_LOW (DR_INIT (data_ref))
...@@ -2175,7 +2168,8 @@ vect_analyze_group_access (struct data_reference *dr) ...@@ -2175,7 +2168,8 @@ vect_analyze_group_access (struct data_reference *dr)
/* Check that the size of the interleaving (including gaps) is not /* Check that the size of the interleaving (including gaps) is not
greater than STEP. */ greater than STEP. */
if (dr_step && dr_step < count_in_bytes + gaps * type_size) if (dr_step != 0
&& absu_hwi (dr_step) < count_in_bytes + gaps * type_size)
{ {
if (dump_enabled_p ()) if (dump_enabled_p ())
{ {
...@@ -2188,7 +2182,8 @@ vect_analyze_group_access (struct data_reference *dr) ...@@ -2188,7 +2182,8 @@ vect_analyze_group_access (struct data_reference *dr)
/* Check that the size of the interleaving is equal to STEP for stores, /* Check that the size of the interleaving is equal to STEP for stores,
i.e., that there are no gaps. */ i.e., that there are no gaps. */
if (dr_step && dr_step != count_in_bytes) if (dr_step != 0
&& absu_hwi (dr_step) != count_in_bytes)
{ {
if (DR_IS_READ (dr)) if (DR_IS_READ (dr))
{ {
...@@ -2208,7 +2203,8 @@ vect_analyze_group_access (struct data_reference *dr) ...@@ -2208,7 +2203,8 @@ vect_analyze_group_access (struct data_reference *dr)
} }
/* Check that STEP is a multiple of type size. */ /* Check that STEP is a multiple of type size. */
if (dr_step && (dr_step % type_size) != 0) if (dr_step != 0
&& (dr_step % type_size) != 0)
{ {
if (dump_enabled_p ()) if (dump_enabled_p ())
{ {
...@@ -3520,7 +3516,6 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop, ...@@ -3520,7 +3516,6 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
tree aptr; tree aptr;
gimple_stmt_iterator incr_gsi; gimple_stmt_iterator incr_gsi;
bool insert_after; bool insert_after;
bool negative;
tree indx_before_incr, indx_after_incr; tree indx_before_incr, indx_after_incr;
gimple incr; gimple incr;
tree step; tree step;
...@@ -3550,11 +3545,10 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop, ...@@ -3550,11 +3545,10 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
else else
step = DR_STEP (STMT_VINFO_DATA_REF (stmt_info)); step = DR_STEP (STMT_VINFO_DATA_REF (stmt_info));
if (tree_int_cst_compare (step, size_zero_node) == 0) if (integer_zerop (step))
*inv_p = true; *inv_p = true;
else else
*inv_p = false; *inv_p = false;
negative = tree_int_cst_compare (step, size_zero_node) < 0;
/* Create an expression for the first address accessed by this load /* Create an expression for the first address accessed by this load
in LOOP. */ in LOOP. */
...@@ -3693,18 +3687,18 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop, ...@@ -3693,18 +3687,18 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
else else
{ {
/* The step of the aggregate pointer is the type size. */ /* The step of the aggregate pointer is the type size. */
tree step = TYPE_SIZE_UNIT (aggr_type); tree iv_step = TYPE_SIZE_UNIT (aggr_type);
/* One exception to the above is when the scalar step of the load in /* One exception to the above is when the scalar step of the load in
LOOP is zero. In this case the step here is also zero. */ LOOP is zero. In this case the step here is also zero. */
if (*inv_p) if (*inv_p)
step = size_zero_node; iv_step = size_zero_node;
else if (negative) else if (tree_int_cst_sgn (step) == -1)
step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step); iv_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (iv_step), iv_step);
standard_iv_increment_position (loop, &incr_gsi, &insert_after); standard_iv_increment_position (loop, &incr_gsi, &insert_after);
create_iv (aggr_ptr_init, create_iv (aggr_ptr_init,
fold_convert (aggr_ptr_type, step), fold_convert (aggr_ptr_type, iv_step),
aggr_ptr, loop, &incr_gsi, insert_after, aggr_ptr, loop, &incr_gsi, insert_after,
&indx_before_incr, &indx_after_incr); &indx_before_incr, &indx_after_incr);
incr = gsi_stmt (incr_gsi); incr = gsi_stmt (incr_gsi);
......
...@@ -4465,7 +4465,13 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt, ...@@ -4465,7 +4465,13 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
if (negative) if (negative)
{ {
gcc_assert (!grouped_load); if (grouped_load)
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"negative step for group load not supported");
return false;
}
alignment_support_scheme = vect_supportable_dr_alignment (dr, false); alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
if (alignment_support_scheme != dr_aligned if (alignment_support_scheme != dr_aligned
&& alignment_support_scheme != dr_unaligned_supported) && alignment_support_scheme != dr_unaligned_supported)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment