Commit d7770457, authored and committed by Sebastian Pop

re PR tree-optimization/22236 (wrong code for casts and scev)

	PR tree-optimization/22236
	* tree-cfg.c (print_pred_bbs, print_succ_bbs): Correctly print
	successors and predecessors.
	* tree-chrec.c (chrec_convert): Before converting, check that
	sequences don't wrap.
	* tree-data-ref.c (compute_estimated_nb_iterations): Moved ...
	(analyze_array): Extern.
	(find_data_references_in_loop): Remove call to
	compute_estimated_nb_iterations.
	* tree-data-ref.h (analyze_array): Declared.
	* tree-flow-inline.h (single_ssa_tree_operand, single_ssa_use_operand,
	single_ssa_def_operand, zero_ssa_operands): Fix documentation.
	* tree-flow.h (scev_probably_wraps_p): Declare with an extra parameter.
	* tree-scalar-evolution.c (instantiate_parameters_1): Factor entry
	condition.
	* tree-ssa-loop-ivcanon.c: Fix documentation.
	* tree-ssa-loop-ivopts.c (idx_find_step): Add a fixme note.
	* tree-ssa-loop-niter.c (compute_estimated_nb_iterations): ... here.
	(infer_loop_bounds_from_undefined): New.
	(estimate_numbers_of_iterations_loop): Use
	infer_loop_bounds_from_undefined.
	(used_in_pointer_arithmetic_p): New.
	(scev_probably_wraps_p): Pass an extra parameter.  Call
	used_in_pointer_arithmetic_p.  Check that AT_STMT is not null.
	(convert_step): Fix documentation.
	* tree-vrp.c (adjust_range_with_scev): Call instantiate_parameters.
	Use initial_condition_in_loop_num and evolution_part_in_loop_num
	instead of CHREC_LEFT and CHREC_RIGHT.  Adjust the call to
	scev_probably_wraps_p.

From-SVN: r103055
Parent: e2df5c1d
New testcase (path not shown in this view):
/* { dg-do run } */
/* { dg-options "-O1 -fno-tree-vrp -fwrapv" } */
/* PR tree-optimization/22236
Avoid conversion of (signed char) {(uchar)1, +, (uchar)1}_x when
it is not possible to prove that the scev does not wrap.
In this PR, a sequence 1, 2, ..., 255 has to be converted to
signed char, but this would wrap: 1, 2, ..., 127, -128, ... The
result should not be a linear scev {(schar)1, +, (schar)1}_x.
The conversion should be kept: (schar) {(uchar)1, +, (uchar)1}_x.
*/
void abort(void);

static inline void
foo (signed char a)
{
  int b = a - 0x7F;

  if (b > 1)
    abort();
}

int main()
{
  unsigned char b;

  for (b = 0; b < 0xFF; b++)
    foo (b);

  return 0;
}
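For reference, a standalone sketch (not part of the patch) of the narrowing described in the comment above.  In chrec notation, {base, +, step}_x denotes the value base + i*step at iteration i of loop x, so {(uchar)1, +, (uchar)1}_x takes the unsigned char values 1, 2, ..., 255.  Converting those to signed char changes every value above 127 (implementation-defined, wrapping to negatives on the usual two's complement targets), which is why the narrowed sequence is not the affine {(schar)1, +, (schar)1}_x:

#include <stdio.h>

int
main (void)
{
  unsigned char u;

  /* Walk the values of {(uchar)1, +, (uchar)1}_x and print where the
     conversion to signed char diverges from the unsigned value.  */
  for (u = 1; u != 0; u++)
    {
      signed char s = (signed char) u;

      if ((int) s != (int) u)
        printf ("%3u -> %4d\n", (unsigned) u, (int) s);
    }

  return 0;
}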
tree-cfg.c:
@@ -4502,7 +4502,7 @@ static void print_pred_bbs (FILE *, basic_block bb);
 static void print_succ_bbs (FILE *, basic_block bb);

-/* Print the predecessors indexes of edge E on FILE.  */
+/* Print on FILE the indexes for the predecessors of basic_block BB.  */

 static void
 print_pred_bbs (FILE *file, basic_block bb)
@@ -4511,11 +4511,11 @@ print_pred_bbs (FILE *file, basic_block bb)
   edge_iterator ei;

   FOR_EACH_EDGE (e, ei, bb->preds)
-    fprintf (file, "bb_%d", e->src->index);
+    fprintf (file, "bb_%d ", e->src->index);
 }

-/* Print the successors indexes of edge E on FILE.  */
+/* Print on FILE the indexes for the successors of basic_block BB.  */

 static void
 print_succ_bbs (FILE *file, basic_block bb)
@@ -4524,7 +4524,7 @@ print_succ_bbs (FILE *file, basic_block bb)
   edge_iterator ei;

   FOR_EACH_EDGE (e, ei, bb->succs)
-    fprintf (file, "bb_%d", e->src->index);
+    fprintf (file, "bb_%d ", e->dest->index);
 }
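As an aside, a standalone sketch (not part of the patch) of the dump-formatting problem fixed above: without the trailing space in the format string, successive block indexes run together; the successor dump also now prints e->dest->index rather than e->src->index.

#include <stdio.h>

int
main (void)
{
  int preds[] = { 3, 4 };
  int i;

  /* Old format string: indexes are printed back to back, "bb_3bb_4".  */
  for (i = 0; i < 2; i++)
    printf ("bb_%d", preds[i]);
  printf ("\n");

  /* New format string: "bb_3 bb_4 ", as in the patched printers.  */
  for (i = 0; i < 2; i++)
    printf ("bb_%d ", preds[i]);
  printf ("\n");

  return 0;
}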
tree-chrec.c:
@@ -1110,9 +1110,24 @@ chrec_convert (tree type, tree chrec, tree at_stmt)
   if (evolution_function_is_affine_p (chrec))
     {
-      tree step = convert_step (current_loops->parray[CHREC_VARIABLE (chrec)],
-                                type, CHREC_LEFT (chrec), CHREC_RIGHT (chrec),
-                                at_stmt);
+      tree step;
+      bool dummy;
+
+      /* Avoid conversion of (signed char) {(uchar)1, +, (uchar)1}_x
+         when it is not possible to prove that the scev does not wrap.
+         See PR22236, where a sequence 1, 2, ..., 255 has to be
+         converted to signed char, but this would wrap:
+         1, 2, ..., 127, -128, ...  The result should not be
+         {(schar)1, +, (schar)1}_x, but instead, we should keep the
+         conversion: (schar) {(uchar)1, +, (uchar)1}_x.  */
+      if (scev_probably_wraps_p (type, CHREC_LEFT (chrec), CHREC_RIGHT (chrec),
+                                 at_stmt,
+                                 current_loops->parray[CHREC_VARIABLE (chrec)],
+                                 &dummy, &dummy))
+        return fold_convert (type, chrec);
+
+      step = convert_step (current_loops->parray[CHREC_VARIABLE (chrec)], type,
+                           CHREC_LEFT (chrec), CHREC_RIGHT (chrec), at_stmt);
       if (!step)
         return fold_convert (type, chrec);
tree-data-ref.c:
@@ -731,23 +731,6 @@ dump_ddrs (FILE *file, varray_type ddrs)
-/* Initialize LOOP->ESTIMATED_NB_ITERATIONS with the lowest safe
-   approximation of the number of iterations for LOOP.  */
-
-static void
-compute_estimated_nb_iterations (struct loop *loop)
-{
-  struct nb_iter_bound *bound;
-
-  for (bound = loop->bounds; bound; bound = bound->next)
-    if (TREE_CODE (bound->bound) == INTEGER_CST
-        /* Update only when there is no previous estimation.  */
-        && (chrec_contains_undetermined (loop->estimated_nb_iterations)
-            /* Or when the current estimation is smaller.  */
-            || tree_int_cst_lt (bound->bound, loop->estimated_nb_iterations)))
-      loop->estimated_nb_iterations = bound->bound;
-}
-
 /* Estimate the number of iterations from the size of the data and the
    access functions.  */
@@ -830,7 +813,7 @@ analyze_array_indexes (struct loop *loop,
    set to true when REF is in the right hand side of an
    assignment.  */

-static struct data_reference *
+struct data_reference *
 analyze_array (tree stmt, tree ref, bool is_read)
 {
   struct data_reference *res;
@@ -3644,9 +3627,6 @@ find_data_references_in_loop (struct loop *loop, varray_type *datarefs)
           if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
             loop->parallel_p = false;
         }
-
-      if (chrec_contains_undetermined (loop->estimated_nb_iterations))
-        compute_estimated_nb_iterations (loop);
     }

   free (bbs);
tree-data-ref.h:
@@ -264,6 +264,7 @@ extern void free_dependence_relation (struct data_dependence_relation *);
 extern void free_dependence_relations (varray_type);
 extern void free_data_refs (varray_type);
 extern void compute_subscript_distance (struct data_dependence_relation *);
+extern struct data_reference *analyze_array (tree, tree, bool);
tree-flow-inline.h:
@@ -1191,7 +1191,7 @@ op_iter_init_must_and_may_def (ssa_op_iter *ptr, tree stmt,
 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
-   return NULL.  PTR is the iterator to use.  */
+   return NULL.  */
 static inline tree
 single_ssa_tree_operand (tree stmt, int flags)
 {
@@ -1209,7 +1209,7 @@ single_ssa_tree_operand (tree stmt, int flags)
 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
-   return NULL.  PTR is the iterator to use.  */
+   return NULL.  */
 static inline use_operand_p
 single_ssa_use_operand (tree stmt, int flags)
 {
@@ -1228,7 +1228,7 @@ single_ssa_use_operand (tree stmt, int flags)
 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
-   return NULL.  PTR is the iterator to use.  */
+   return NULL.  */
 static inline def_operand_p
 single_ssa_def_operand (tree stmt, int flags)
 {
@@ -1246,7 +1246,7 @@ single_ssa_def_operand (tree stmt, int flags)
 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
-   return NULL.  PTR is the iterator to use.  */
+   return NULL.  */
 static inline bool
 zero_ssa_operands (tree stmt, int flags)
 {
tree-flow.h:
@@ -728,7 +728,8 @@ tree find_loop_niter (struct loop *, edge *);
 tree loop_niter_by_eval (struct loop *, edge);
 tree find_loop_niter_by_eval (struct loop *, edge *);
 void estimate_numbers_of_iterations (struct loops *);
-bool scev_probably_wraps_p (tree, tree, tree, tree, struct loop *, bool *);
+bool scev_probably_wraps_p (tree, tree, tree, tree, struct loop *, bool *,
+                            bool *);
 tree convert_step (struct loop *, tree, tree, tree, tree);
 void free_numbers_of_iterations_estimates (struct loops *);
 void rewrite_into_loop_closed_ssa (bitmap, unsigned);
tree-scalar-evolution.c:
@@ -1939,11 +1939,8 @@ instantiate_parameters_1 (struct loop *loop, tree chrec,
   basic_block def_bb;
   struct loop *def_loop;

-  if (chrec == NULL_TREE
-      || automatically_generated_chrec_p (chrec))
-    return chrec;
-
-  if (is_gimple_min_invariant (chrec))
+  if (automatically_generated_chrec_p (chrec)
+      || is_gimple_min_invariant (chrec))
     return chrec;

   switch (TREE_CODE (chrec))
tree-ssa-loop-ivcanon.c:
@@ -267,7 +267,7 @@ try_unroll_loop_completely (struct loops *loops ATTRIBUTE_UNUSED,
 }

 /* Adds a canonical induction variable to LOOP if suitable.  LOOPS is the loops
-   tree.  CREATE_IV is true if we may create a new iv.  UL determines what
+   tree.  CREATE_IV is true if we may create a new iv.  UL determines
    which loops we are allowed to completely unroll.  If TRY_EVAL is true, we try
    to determine the number of iterations of a loop by direct evaluation.
    Returns true if cfg is changed.  */
tree-ssa-loop-ivopts.c:
@@ -1443,6 +1443,8 @@ idx_find_step (tree base, tree *idx, void *data)
   /* The step for pointer arithmetics already is 1 byte.  */
   step = build_int_cst (sizetype, 1);

+  /* FIXME: convert_step should not be used outside chrec_convert: fix
+     this by calling chrec_convert.  */
   iv_step = convert_step (dta->ivopts_data->current_loop,
                           sizetype, iv->base, iv->step, dta->stmt);
tree-ssa-loop-niter.c:
@@ -1381,6 +1381,128 @@ record_estimate (struct loop *loop, tree bound, tree additional, tree at_stmt)
   loop->bounds = elt;
 }

+/* Initialize LOOP->ESTIMATED_NB_ITERATIONS with the lowest safe
+   approximation of the number of iterations for LOOP.  */
+
+static void
+compute_estimated_nb_iterations (struct loop *loop)
+{
+  struct nb_iter_bound *bound;
+
+  for (bound = loop->bounds; bound; bound = bound->next)
+    if (TREE_CODE (bound->bound) == INTEGER_CST
+        /* Update only when there is no previous estimation.  */
+        && (chrec_contains_undetermined (loop->estimated_nb_iterations)
+            /* Or when the current estimation is smaller.  */
+            || tree_int_cst_lt (bound->bound, loop->estimated_nb_iterations)))
+      loop->estimated_nb_iterations = bound->bound;
+}
+
+/* The following analyzers are extracting informations on the bounds
+   of LOOP from the following undefined behaviors:
+
+   - data references should not access elements over the statically
+     allocated size,
+
+   - signed variables should not overflow when flag_wrapv is not set.  */
+
+static void
+infer_loop_bounds_from_undefined (struct loop *loop)
+{
+  unsigned i;
+  basic_block bb, *bbs;
+  block_stmt_iterator bsi;
+
+  bbs = get_loop_body (loop);
+
+  for (i = 0; i < loop->num_nodes; i++)
+    {
+      bb = bbs[i];
+
+      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+        {
+          tree stmt = bsi_stmt (bsi);
+
+          switch (TREE_CODE (stmt))
+            {
+            case MODIFY_EXPR:
+              {
+                tree op0 = TREE_OPERAND (stmt, 0);
+                tree op1 = TREE_OPERAND (stmt, 1);
+
+                /* For each array access, analyze its access function
+                   and record a bound on the loop iteration domain.  */
+                if (TREE_CODE (op1) == ARRAY_REF)
+                  analyze_array (stmt, op1, true);
+
+                if (TREE_CODE (op0) == ARRAY_REF)
+                  analyze_array (stmt, op0, false);
+
+                /* For each signed type variable in LOOP, analyze its
+                   scalar evolution and record a bound of the loop
+                   based on the type's ranges.  */
+                else if (!flag_wrapv && TREE_CODE (op0) == SSA_NAME)
+                  {
+                    tree init, step, diff, estimation;
+                    tree scev = instantiate_parameters
+                      (loop, analyze_scalar_evolution (loop, op0));
+                    tree type = chrec_type (scev);
+                    tree utype;
+
+                    if (chrec_contains_undetermined (scev)
+                        || TYPE_UNSIGNED (type))
+                      break;
+
+                    init = initial_condition_in_loop_num (scev, loop->num);
+                    step = evolution_part_in_loop_num (scev, loop->num);
+
+                    if (init == NULL_TREE
+                        || step == NULL_TREE
+                        || TREE_CODE (init) != INTEGER_CST
+                        || TREE_CODE (step) != INTEGER_CST)
+                      break;
+
+                    utype = unsigned_type_for (type);
+                    if (tree_int_cst_lt (step, integer_zero_node))
+                      diff = fold (build2 (MINUS_EXPR, utype, init,
+                                           TYPE_MIN_VALUE (type)));
+                    else
+                      diff = fold (build2 (MINUS_EXPR, utype,
+                                           TYPE_MAX_VALUE (type), init));
+
+                    estimation = fold (build2 (CEIL_DIV_EXPR, utype, diff,
+                                               step));
+                    record_estimate (loop, estimation, boolean_true_node, stmt);
+                  }
+
+                break;
+              }
+
+            case CALL_EXPR:
+              {
+                tree args;
+
+                for (args = TREE_OPERAND (stmt, 1); args;
+                     args = TREE_CHAIN (args))
+                  if (TREE_CODE (TREE_VALUE (args)) == ARRAY_REF)
+                    analyze_array (stmt, TREE_VALUE (args), true);
+
+                break;
+              }
+
+            default:
+              break;
+            }
+        }
+
+      if (chrec_contains_undetermined (loop->estimated_nb_iterations))
+        compute_estimated_nb_iterations (loop);
+    }
+
+  free (bbs);
+}
 /* Records estimates on numbers of iterations of LOOP.  */

 static void
@@ -1419,14 +1541,8 @@ estimate_numbers_of_iterations_loop (struct loop *loop)
     }
   free (exits);

-  /* Analyzes the bounds of arrays accessed in the loop.  */
   if (chrec_contains_undetermined (loop->estimated_nb_iterations))
-    {
-      varray_type datarefs;
-
-      VARRAY_GENERIC_PTR_INIT (datarefs, 3, "datarefs");
-      find_data_references_in_loop (loop, &datarefs);
-      free_data_refs (datarefs);
-    }
+    infer_loop_bounds_from_undefined (loop);
 }

 /* Records estimates on numbers of iterations of LOOPS.  */
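To make the intent of infer_loop_bounds_from_undefined above concrete, here is a hypothetical source loop (a sketch, not taken from the patch or the testsuite; the names fill, a and n are made up) showing both kinds of undefined behavior the function exploits to bound the iteration count:

extern int a[100];

void
fill (int n)
{
  signed char i;

  /* Without -fwrapv, i overflowing SCHAR_MAX would be undefined, so the
     body can be assumed to run roughly at most SCHAR_MAX times; likewise
     an access a[i] past the 100 declared elements would be undefined,
     which bounds the loop's iteration domain as well.  */
  for (i = 0; i < n; i++)
    a[i] = 0;
}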
@@ -1645,6 +1761,43 @@ convert_step_widening (struct loop *loop, tree new_type, tree base, tree step,
   return NULL_TREE;
 }

+/* Returns true when VAR is used in pointer arithmetics.  DEPTH is
+   used for limiting the search.  */
+
+static bool
+used_in_pointer_arithmetic_p (tree var, int depth)
+{
+  use_operand_p use_p;
+  imm_use_iterator iter;
+
+  if (depth == 0
+      || TREE_CODE (var) != SSA_NAME
+      || !has_single_use (var))
+    return false;
+
+  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
+    {
+      tree stmt = USE_STMT (use_p);
+
+      if (stmt && TREE_CODE (stmt) == MODIFY_EXPR)
+        {
+          tree rhs = TREE_OPERAND (stmt, 1);
+
+          if (TREE_CODE (rhs) == NOP_EXPR
+              || TREE_CODE (rhs) == CONVERT_EXPR)
+            {
+              if (POINTER_TYPE_P (TREE_TYPE (rhs)))
+                return true;
+              return false;
+            }
+          else
+            return used_in_pointer_arithmetic_p (TREE_OPERAND (stmt, 0),
+                                                 depth - 1);
+        }
+    }
+
+  return false;
+}
 /* Return false only when the induction variable BASE + STEP * I is
    known to not overflow: i.e. when the number of iterations is small
    enough with respect to the step and initial condition in order to
@@ -1652,19 +1805,60 @@ convert_step_widening (struct loop *loop, tree new_type, tree base, tree step,
    iv is known to overflow or when the property is not computable.

    Initialize INIT_IS_MAX to true when the evolution goes from
-   INIT_IS_MAX to LOWER_BOUND_IN_TYPE, false in the contrary case, not
-   defined when the function returns true.  */
+   INIT_IS_MAX to LOWER_BOUND_IN_TYPE, false in the contrary case.
+   When this property cannot be determined, UNKNOWN_MAX is set to
+   true.  */

 bool
 scev_probably_wraps_p (tree type, tree base, tree step,
                        tree at_stmt, struct loop *loop,
-                       bool *init_is_max)
+                       bool *init_is_max, bool *unknown_max)
 {
   struct nb_iter_bound *bound;
   tree delta, step_abs;
   tree unsigned_type, valid_niter;
-  tree base_plus_step = fold_build2 (PLUS_EXPR, type, base, step);
+  tree base_plus_step;
+
+  /* FIXME: The following code will not be used anymore once
+     http://gcc.gnu.org/ml/gcc-patches/2005-06/msg02025.html is
+     committed.
+
+     If AT_STMT is a cast to unsigned that is later used for
+     referencing a memory location, it is followed by a pointer
+     conversion just after.  Because pointers do not wrap, the
+     sequences that reference the memory do not wrap either.  In the
+     following example, sequences corresponding to D_13 and to D_14
+     can be proved to not wrap because they are used for computing a
+     memory access:
+
+       D.1621_13 = (long unsigned intD.4) D.1620_12;
+       D.1622_14 = D.1621_13 * 8;
+       D.1623_15 = (doubleD.29 *) D.1622_14;  */
+  if (at_stmt && TREE_CODE (at_stmt) == MODIFY_EXPR)
+    {
+      tree op0 = TREE_OPERAND (at_stmt, 0);
+      tree op1 = TREE_OPERAND (at_stmt, 1);
+      tree type_op1 = TREE_TYPE (op1);
+
+      if ((TYPE_UNSIGNED (type_op1)
+           && used_in_pointer_arithmetic_p (op0, 2))
+          || POINTER_TYPE_P (type_op1))
+        {
+          *unknown_max = true;
+          return false;
+        }
+    }
+
+  if (TREE_CODE (base) == REAL_CST
+      || TREE_CODE (step) == REAL_CST)
+    {
+      *unknown_max = true;
+      return true;
+    }
+
+  *unknown_max = false;
+  base_plus_step = fold_build2 (PLUS_EXPR, type, base, step);
   switch (compare_trees (base_plus_step, base))
     {
     case -1:
@@ -1691,6 +1885,7 @@ scev_probably_wraps_p (tree type, tree base, tree step,
        don't know as in the default case.  */

     default:
+      *unknown_max = true;
       return true;
     }

@@ -1709,7 +1904,7 @@ scev_probably_wraps_p (tree type, tree base, tree step,
      i_2 to wrap around, but not i.0_6, because it is of a signed
      type.  This causes VRP to erroneously fold the predicate above
      because it thinks that i.0_6 cannot be negative.  */
-  if (TREE_CODE (at_stmt) == MODIFY_EXPR)
+  if (at_stmt && TREE_CODE (at_stmt) == MODIFY_EXPR)
     {
       tree rhs = TREE_OPERAND (at_stmt, 1);
       tree outer_t = TREE_TYPE (rhs);
@@ -1725,9 +1920,12 @@ scev_probably_wraps_p (tree type, tree base, tree step,
           if (TYPE_UNSIGNED (inner_t)
               && (TYPE_SIZE (inner_t) <= TYPE_SIZE (outer_t)
                   || TYPE_PRECISION (inner_t) <= TYPE_PRECISION (outer_t)))
-            return true;
+            {
+              *unknown_max = true;
+              return true;
+            }
        }
     }

   /* After having set INIT_IS_MAX, we can return false: when not using
      wrapping arithmetic, signed types don't wrap.  */
@@ -1746,11 +1944,13 @@ scev_probably_wraps_p (tree type, tree base, tree step,

   /* At this point we still don't have a proof that the iv does not
      overflow: give up.  */
+  *unknown_max = true;
   return true;
 }

 /* Return the conversion to NEW_TYPE of the STEP of an induction
-   variable BASE + STEP * I at AT_STMT.  */
+   variable BASE + STEP * I at AT_STMT.  When it fails, return
+   NULL_TREE.  */
 tree
 convert_step (struct loop *loop, tree new_type, tree base, tree step,
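A hypothetical C fragment (a sketch, not from the patch; the names get, p and i are made up) of the pattern used_in_pointer_arithmetic_p looks for: an index whose widened copy feeds a pointer conversion, as in the D.1621_13 / D.1623_15 GIMPLE excerpt quoted in the FIXME comment of scev_probably_wraps_p above.

double
get (double *p, unsigned int i)
{
  /* On typical targets this lowers to something like
       D.1 = (long unsigned int) i;
       D.2 = D.1 * 8;
       D.3 = (double *) D.2;
     and, because pointers do not wrap, the widened index used in the
     valid access cannot be assumed to wrap either.  */
  return p[i];
}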
tree-vrp.c:
@@ -1534,29 +1534,31 @@ adjust_range_with_scev (value_range_t *vr, struct loop *loop, tree stmt,
                         tree var)
 {
   tree init, step, chrec;
-  bool init_is_max;
+  bool init_is_max, unknown_max;

   /* TODO.  Don't adjust anti-ranges.  An anti-range may provide
      better opportunities than a regular range, but I'm not sure.  */
   if (vr->type == VR_ANTI_RANGE)
     return;

-  chrec = analyze_scalar_evolution (loop, var);
+  chrec = instantiate_parameters (loop, analyze_scalar_evolution (loop, var));
   if (TREE_CODE (chrec) != POLYNOMIAL_CHREC)
     return;

-  init = CHREC_LEFT (chrec);
-  step = CHREC_RIGHT (chrec);
+  init = initial_condition_in_loop_num (chrec, loop->num);
+  step = evolution_part_in_loop_num (chrec, loop->num);

   /* If STEP is symbolic, we can't know whether INIT will be the
      minimum or maximum value in the range.  */
-  if (!is_gimple_min_invariant (step))
+  if (step == NULL_TREE
+      || !is_gimple_min_invariant (step))
     return;

   /* Do not adjust ranges when chrec may wrap.  */
   if (scev_probably_wraps_p (chrec_type (chrec), init, step, stmt,
                              cfg_loops->parray[CHREC_VARIABLE (chrec)],
-                             &init_is_max))
+                             &init_is_max, &unknown_max)
+      || unknown_max)
     return;

   if (!POINTER_TYPE_P (TREE_TYPE (init))
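Finally, a minimal sketch (hypothetical, not from the patch; the names count, n, i and c are made up) of the situation adjust_range_with_scev now handles more carefully: a range is derived from an induction variable's chrec only when scev_probably_wraps_p shows it cannot wrap and the direction of the evolution is actually known (neither init_is_max nor unknown_max in doubt).

int
count (int n)
{
  int i, c = 0;

  /* The evolution of i is the chrec {0, +, 1}_x (x the loop number):
     initial condition 0, step 1.  Since signed overflow is undefined
     here, VRP may narrow the range of i to start at 0; with the patch
     it refuses to do so whenever the chrec may wrap or unknown_max is
     set by scev_probably_wraps_p.  */
  for (i = 0; i < n; i++)
    c++;

  return c;
}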