Commit 8e846c66 by Richard Biener, committed by Richard Biener

tree-vect-data-refs.c (vect_find_stmt_data_reference): New function, combining stmt data ref gathering and fatal analysis parts.

2018-05-25  Richard Biener  <rguenther@suse.de>

	* tree-vect-data-refs.c (vect_find_stmt_data_reference): New
	function, combining stmt data ref gathering and fatal analysis
	parts.
	(vect_analyze_data_refs): Remove now redundant code and simplify.
	* tree-vect-loop.c (vect_get_datarefs_in_loop): Factor out from
	vect_analyze_loop_2 and use vect_find_stmt_data_reference.
	* tree-vect-slp.c (vect_slp_bb): Use vect_find_stmt_data_reference.
	* tree-vectorizer.h (vect_find_stmt_data_reference): Declare.

From-SVN: r260754
parent 1623d9f3

gcc/ChangeLog:

2018-05-25  Richard Biener  <rguenther@suse.de>

	* tree-vect-data-refs.c (vect_find_stmt_data_reference): New
	function, combining stmt data ref gathering and fatal analysis
	parts.
	(vect_analyze_data_refs): Remove now redundant code and simplify.
	* tree-vect-loop.c (vect_get_datarefs_in_loop): Factor out from
	vect_analyze_loop_2 and use vect_find_stmt_data_reference.
	* tree-vect-slp.c (vect_slp_bb): Use vect_find_stmt_data_reference.
	* tree-vectorizer.h (vect_find_stmt_data_reference): Declare.

2018-05-25  Bin Cheng  <bin.cheng@arm.com>

	PR tree-optimization/85720
	...

gcc/tree-vect-data-refs.c:

@@ -3936,6 +3936,104 @@ vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo,
   return true;
 }

+/* Find the data references in STMT, analyze them with respect to LOOP and
+   append them to DATAREFS.  Return false if datarefs in this stmt cannot
+   be handled.  */
+
+bool
+vect_find_stmt_data_reference (loop_p loop, gimple *stmt,
+                               vec<data_reference_p> *datarefs)
+{
+  /* We can ignore clobbers for dataref analysis - they are removed during
+     loop vectorization and BB vectorization checks dependences with a
+     stmt walk.  */
+  if (gimple_clobber_p (stmt))
+    return true;
+
+  if (gimple_has_volatile_ops (stmt))
+    {
+      if (dump_enabled_p ())
+        {
+          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                           "not vectorized: volatile type ");
+          dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
+        }
+      return false;
+    }
+
+  if (stmt_can_throw_internal (stmt))
+    {
+      if (dump_enabled_p ())
+        {
+          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                           "not vectorized: statement can throw an "
+                           "exception ");
+          dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
+        }
+      return false;
+    }
+
+  auto_vec<data_reference_p, 2> refs;
+  if (!find_data_references_in_stmt (loop, stmt, &refs))
+    return false;
+
+  if (refs.is_empty ())
+    return true;
+
+  if (refs.length () > 1)
+    {
+      if (dump_enabled_p ())
+        {
+          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                           "not vectorized: more than one data ref "
+                           "in stmt: ");
+          dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
+        }
+      return false;
+    }
+
+  if (gcall *call = dyn_cast <gcall *> (stmt))
+    if (!gimple_call_internal_p (call)
+        || (gimple_call_internal_fn (call) != IFN_MASK_LOAD
+            && gimple_call_internal_fn (call) != IFN_MASK_STORE))
+      {
+        if (dump_enabled_p ())
+          {
+            dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                             "not vectorized: dr in a call ");
+            dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
+          }
+        return false;
+      }
+
+  data_reference_p dr = refs.pop ();
+  if (TREE_CODE (DR_REF (dr)) == COMPONENT_REF
+      && DECL_BIT_FIELD (TREE_OPERAND (DR_REF (dr), 1)))
+    {
+      if (dump_enabled_p ())
+        {
+          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                           "not vectorized: statement is bitfield "
+                           "access ");
+          dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
+        }
+      return false;
+    }
+
+  if (DR_BASE_ADDRESS (dr)
+      && TREE_CODE (DR_BASE_ADDRESS (dr)) == INTEGER_CST)
+    {
+      if (dump_enabled_p ())
+        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                         "not vectorized: base addr of dr is a "
+                         "constant\n");
+      return false;
+    }
+
+  datarefs->safe_push (dr);
+  return true;
+}
+
 /* Function vect_analyze_data_refs.

    Find all the data references in the loop or basic block.

@@ -3974,38 +4072,14 @@ vect_analyze_data_refs (vec_info *vinfo, poly_uint64 *min_vf)
     {
       gimple *stmt;
       stmt_vec_info stmt_info;
-      tree base, offset, init;
       enum { SG_NONE, GATHER, SCATTER } gatherscatter = SG_NONE;
       bool simd_lane_access = false;
       poly_uint64 vf;

-again:
-      if (!dr || !DR_REF (dr))
-        {
-          if (dump_enabled_p ())
-            dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                             "not vectorized: unhandled data-ref\n");
-          return false;
-        }
+      gcc_assert (DR_REF (dr));

       stmt = DR_STMT (dr);
       stmt_info = vinfo_for_stmt (stmt);

-      /* Discard clobbers from the dataref vector.  We will remove
-         clobber stmts during vectorization.  */
-      if (gimple_clobber_p (stmt))
-        {
-          free_data_ref (dr);
-          if (i == datarefs.length () - 1)
-            {
-              datarefs.pop ();
-              break;
-            }
-          datarefs.ordered_remove (i);
-          dr = datarefs[i];
-          goto again;
-        }
-
       /* Check that analysis of the data-ref succeeded.  */
       if (!DR_BASE_ADDRESS (dr) || !DR_OFFSET (dr) || !DR_INIT (dr)
           || !DR_STEP (dr))

@@ -4117,95 +4191,42 @@ again:
            }
        }

-      if (TREE_CODE (DR_BASE_ADDRESS (dr)) == INTEGER_CST)
-        {
-          if (dump_enabled_p ())
-            dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                             "not vectorized: base addr of dr is a "
-                             "constant\n");
-          if (is_a <bb_vec_info> (vinfo))
-            break;
-          if (gatherscatter != SG_NONE || simd_lane_access)
-            free_data_ref (dr);
-          return false;
-        }
-
-      if (TREE_THIS_VOLATILE (DR_REF (dr)))
-        {
-          if (dump_enabled_p ())
-            {
-              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                               "not vectorized: volatile type ");
-              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
-            }
-          if (is_a <bb_vec_info> (vinfo))
-            break;
-          return false;
-        }
-
-      if (stmt_can_throw_internal (stmt))
-        {
-          if (dump_enabled_p ())
-            {
-              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                               "not vectorized: statement can throw an "
-                               "exception ");
-              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
-            }
-          if (is_a <bb_vec_info> (vinfo))
-            break;
-          if (gatherscatter != SG_NONE || simd_lane_access)
-            free_data_ref (dr);
-          return false;
-        }
-
-      if (TREE_CODE (DR_REF (dr)) == COMPONENT_REF
-          && DECL_BIT_FIELD (TREE_OPERAND (DR_REF (dr), 1)))
-        {
-          if (dump_enabled_p ())
-            {
-              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                               "not vectorized: statement is bitfield "
-                               "access ");
-              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
-            }
-          if (is_a <bb_vec_info> (vinfo))
-            break;
-          if (gatherscatter != SG_NONE || simd_lane_access)
-            free_data_ref (dr);
-          return false;
-        }
-
-      base = unshare_expr (DR_BASE_ADDRESS (dr));
-      offset = unshare_expr (DR_OFFSET (dr));
-      init = unshare_expr (DR_INIT (dr));
-
-      if (is_gimple_call (stmt)
-          && (!gimple_call_internal_p (stmt)
-              || (gimple_call_internal_fn (stmt) != IFN_MASK_LOAD
-                  && gimple_call_internal_fn (stmt) != IFN_MASK_STORE)))
-        {
-          if (dump_enabled_p ())
-            {
-              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                               "not vectorized: dr in a call ");
-              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
-            }
-          if (is_a <bb_vec_info> (vinfo))
-            break;
-          if (gatherscatter != SG_NONE || simd_lane_access)
-            free_data_ref (dr);
-          return false;
-        }
+      if (TREE_CODE (DR_BASE_ADDRESS (dr)) == ADDR_EXPR
+          && VAR_P (TREE_OPERAND (DR_BASE_ADDRESS (dr), 0))
+          && DECL_NONALIASED (TREE_OPERAND (DR_BASE_ADDRESS (dr), 0)))
+        {
+          if (dump_enabled_p ())
+            {
+              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                               "not vectorized: base object not addressable "
+                               "for stmt: ");
+              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
+            }
+          if (is_a <bb_vec_info> (vinfo))
+            {
+              /* In BB vectorization the ref can still participate
+                 in dependence analysis, we just can't vectorize it.  */
+              STMT_VINFO_VECTORIZABLE (stmt_info) = false;
+              continue;
+            }
+          return false;
+        }
+
+      if (is_a <loop_vec_info> (vinfo)
+          && TREE_CODE (DR_STEP (dr)) != INTEGER_CST)
+        {
+          if (nested_in_vect_loop_p (loop, stmt))
+            {
+              if (dump_enabled_p ())
+                {
+                  dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                                   "not vectorized: not suitable for strided "
+                                   "load ");
+                  dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
+                }
+              return false;
+            }
+          STMT_VINFO_STRIDED_P (stmt_info) = true;
+        }

       /* Update DR field in stmt_vec_info struct.  */

@@ -4222,6 +4243,9 @@ again:
             inner loop: *(BASE + INIT + OFFSET).  By construction,
             this address must be invariant in the inner loop, so we
             can consider it as being used in the outer loop.  */
+          tree base = unshare_expr (DR_BASE_ADDRESS (dr));
+          tree offset = unshare_expr (DR_OFFSET (dr));
+          tree init = unshare_expr (DR_INIT (dr));
          tree init_offset = fold_build2 (PLUS_EXPR, TREE_TYPE (offset),
                                          init, offset);
          tree init_addr = fold_build_pointer_plus (base, init_offset);

@@ -4267,24 +4291,7 @@ again:
            }
        }

-      if (STMT_VINFO_DATA_REF (stmt_info))
-        {
-          if (dump_enabled_p ())
-            {
-              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                               "not vectorized: more than one data ref "
-                               "in stmt: ");
-              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
-            }
-
-          if (is_a <bb_vec_info> (vinfo))
-            break;
-
-          if (gatherscatter != SG_NONE || simd_lane_access)
-            free_data_ref (dr);
-          return false;
-        }
+      gcc_assert (!STMT_VINFO_DATA_REF (stmt_info));
       STMT_VINFO_DATA_REF (stmt_info) = dr;
       if (simd_lane_access)
        {

@@ -4293,27 +4300,6 @@ again:
          datarefs[i] = dr;
        }

-      if (TREE_CODE (DR_BASE_ADDRESS (dr)) == ADDR_EXPR
-          && VAR_P (TREE_OPERAND (DR_BASE_ADDRESS (dr), 0))
-          && DECL_NONALIASED (TREE_OPERAND (DR_BASE_ADDRESS (dr), 0)))
-        {
-          if (dump_enabled_p ())
-            {
-              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                               "not vectorized: base object not addressable "
-                               "for stmt: ");
-              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
-            }
-          if (is_a <bb_vec_info> (vinfo))
-            {
-              /* In BB vectorization the ref can still participate
-                 in dependence analysis, we just can't vectorize it.  */
-              STMT_VINFO_VECTORIZABLE (stmt_info) = false;
-              continue;
-            }
-          return false;
-        }
-
       /* Set vectype for STMT.  */
       scalar_type = TREE_TYPE (DR_REF (dr));
       STMT_VINFO_VECTYPE (stmt_info)

@@ -4391,23 +4377,6 @@ again:
          datarefs[i] = dr;
          STMT_VINFO_GATHER_SCATTER_P (stmt_info) = gatherscatter;
        }
-      else if (is_a <loop_vec_info> (vinfo)
-               && TREE_CODE (DR_STEP (dr)) != INTEGER_CST)
-        {
-          if (nested_in_vect_loop_p (loop, stmt))
-            {
-              if (dump_enabled_p ())
-                {
-                  dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                                   "not vectorized: not suitable for strided "
-                                   "load ");
-                  dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
-                }
-              return false;
-            }
-          STMT_VINFO_STRIDED_P (stmt_info) = true;
-        }
     }

   /* If we stopped analysis at the first dataref we could not analyze
...

gcc/tree-vect-loop.c:

@@ -1774,40 +1774,12 @@ vect_analyze_loop_costing (loop_vec_info loop_vinfo)
   return 1;
 }

-/* Function vect_analyze_loop_2.
-
-   Apply a set of analyses on LOOP, and create a loop_vec_info struct
-   for it.  The different analyses will record information in the
-   loop_vec_info struct.  */
-
 static bool
-vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal)
+vect_get_datarefs_in_loop (loop_p loop, basic_block *bbs,
+                           vec<data_reference_p> *datarefs,
+                           unsigned int *n_stmts)
 {
-  bool ok;
-  int res;
-  unsigned int max_vf = MAX_VECTORIZATION_FACTOR;
-  poly_uint64 min_vf = 2;
-  unsigned int n_stmts = 0;
-
-  /* The first group of checks is independent of the vector size.  */
-  fatal = true;
-
-  /* Find all data references in the loop (which correspond to vdefs/vuses)
-     and analyze their evolution in the loop.  */
-  basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
-
-  loop_p loop = LOOP_VINFO_LOOP (loop_vinfo);
-  if (!find_loop_nest (loop, &LOOP_VINFO_LOOP_NEST (loop_vinfo)))
-    {
-      if (dump_enabled_p ())
-        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                         "not vectorized: loop nest containing two "
-                         "or more consecutive inner loops cannot be "
-                         "vectorized\n");
-      return false;
-    }
-
+  *n_stmts = 0;
   for (unsigned i = 0; i < loop->num_nodes; i++)
     for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]);
         !gsi_end_p (gsi); gsi_next (&gsi))

@@ -1815,9 +1787,8 @@ vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal)
        gimple *stmt = gsi_stmt (gsi);
        if (is_gimple_debug (stmt))
          continue;
-        ++n_stmts;
-        if (!find_data_references_in_stmt (loop, stmt,
-                                           &LOOP_VINFO_DATAREFS (loop_vinfo)))
+        ++(*n_stmts);
+        if (!vect_find_stmt_data_reference (loop, stmt, datarefs))
          {
            if (is_gimple_call (stmt) && loop->safelen)
              {

@@ -1849,14 +1820,55 @@ vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal)
                      }
                  }
              }
-            if (dump_enabled_p ())
-              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
-                               "not vectorized: loop contains function "
-                               "calls or data references that cannot "
-                               "be analyzed\n");
            return false;
          }
      }
+  return true;
+}
+
+/* Function vect_analyze_loop_2.
+
+   Apply a set of analyses on LOOP, and create a loop_vec_info struct
+   for it.  The different analyses will record information in the
+   loop_vec_info struct.  */
+
+static bool
+vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal)
+{
+  bool ok;
+  int res;
+  unsigned int max_vf = MAX_VECTORIZATION_FACTOR;
+  poly_uint64 min_vf = 2;
+
+  /* The first group of checks is independent of the vector size.  */
+  fatal = true;
+
+  /* Find all data references in the loop (which correspond to vdefs/vuses)
+     and analyze their evolution in the loop.  */
+  loop_p loop = LOOP_VINFO_LOOP (loop_vinfo);
+  if (!find_loop_nest (loop, &LOOP_VINFO_LOOP_NEST (loop_vinfo)))
+    {
+      if (dump_enabled_p ())
+        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                         "not vectorized: loop nest containing two "
+                         "or more consecutive inner loops cannot be "
+                         "vectorized\n");
+      return false;
+    }
+
+  /* Gather the data references and count stmts in the loop.  */
+  unsigned int n_stmts;
+  if (!vect_get_datarefs_in_loop (loop, LOOP_VINFO_BBS (loop_vinfo),
+                                  &LOOP_VINFO_DATAREFS (loop_vinfo),
+                                  &n_stmts))
+    {
+      if (dump_enabled_p ())
+        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+                         "not vectorized: loop contains function "
+                         "calls or data references that cannot "
+                         "be analyzed\n");
+      return false;
+    }

   /* Analyze the data references and also adjust the minimal
      vectorization factor according to the loads and stores.  */
...

gcc/tree-vect-slp.c:

@@ -3011,7 +3011,7 @@ vect_slp_bb (basic_block bb)
          if (gimple_location (stmt) != UNKNOWN_LOCATION)
            vect_location = gimple_location (stmt);

-          if (!find_data_references_in_stmt (NULL, stmt, &datarefs))
+          if (!vect_find_stmt_data_reference (NULL, stmt, &datarefs))
            break;
        }
...

gcc/tree-vectorizer.h:

@@ -1495,6 +1495,8 @@ extern bool vect_gather_scatter_fn_p (bool, bool, tree, tree, unsigned int,
                                       signop, int, internal_fn *, tree *);
 extern bool vect_check_gather_scatter (gimple *, loop_vec_info,
                                        gather_scatter_info *);
+extern bool vect_find_stmt_data_reference (loop_p, gimple *,
+                                           vec<data_reference_p> *);
 extern bool vect_analyze_data_refs (vec_info *, poly_uint64 *);
 extern void vect_record_base_alignments (vec_info *);
 extern tree vect_create_data_ref_ptr (gimple *, tree, struct loop *, tree,
...
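
For readers who want the overall shape of the refactoring without reading the full diff, below is a minimal, self-contained C++ sketch of the call pattern this patch introduces: a per-statement helper that combines data-ref gathering with the "fatal" checks (mirroring vect_find_stmt_data_reference) and a walker over all statements that calls it (mirroring vect_get_datarefs_in_loop). The types and fields used here (gimple_stub, data_reference_stub, and their flags) are simplified stand-ins invented for illustration; they are not GCC's real internal API.

// Simplified stand-ins for GCC's internal types; names are hypothetical.
#include <iostream>
#include <vector>

struct gimple_stub
{
  bool is_debug;      // debug stmt: skipped, not counted
  bool is_clobber;    // clobber: ignored for dataref analysis
  bool is_volatile;   // volatile ops: fatal for this stmt
  bool has_data_ref;  // stmt references memory
};

struct data_reference_stub
{
  const gimple_stub *stmt;
};

/* Mirrors vect_find_stmt_data_reference: run the per-statement "fatal"
   checks and append any data reference found; return false only when the
   statement's data refs cannot be handled at all.  */
static bool
find_stmt_data_reference (const gimple_stub *stmt,
                          std::vector<data_reference_stub> *datarefs)
{
  if (stmt->is_clobber)
    return true;                  // ignored, not a failure
  if (stmt->is_volatile)
    return false;                 // analogous to "not vectorized: volatile type"
  if (stmt->has_data_ref)
    datarefs->push_back (data_reference_stub { stmt });
  return true;
}

/* Mirrors vect_get_datarefs_in_loop: walk every statement, count the
   non-debug ones, and stop at the first statement that cannot be handled.  */
static bool
get_datarefs_in_loop (const std::vector<gimple_stub> &stmts,
                      std::vector<data_reference_stub> *datarefs,
                      unsigned int *n_stmts)
{
  *n_stmts = 0;
  for (const gimple_stub &stmt : stmts)
    {
      if (stmt.is_debug)
        continue;
      ++*n_stmts;
      if (!find_stmt_data_reference (&stmt, datarefs))
        return false;
    }
  return true;
}

int
main ()
{
  std::vector<gimple_stub> stmts
    = { { false, false, false, true },    // load/store
        { true,  false, false, false },   // debug stmt
        { false, true,  false, false } }; // clobber
  std::vector<data_reference_stub> datarefs;
  unsigned int n_stmts;
  bool ok = get_datarefs_in_loop (stmts, &datarefs, &n_stmts);
  std::cout << "ok=" << ok << " stmts=" << n_stmts
            << " datarefs=" << datarefs.size () << "\n";
  return 0;
}

The point of the sketch is the division of labor the patch establishes: the walker only iterates and counts, while all per-statement rejection logic lives in the helper, so loop and basic-block vectorization can share it.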