Commit 10c7a96f authored by Steven Bosscher, committed by Steven Bosscher

convert.c (convert): Replace fold (buildN (...)) with fold_buildN.

	* convert.c (convert): Replace fold (buildN (...)) with fold_buildN.
	* trans-array.c (gfc_trans_allocate_array_storage,
	gfc_trans_allocate_temp_array, gfc_trans_array_constructor_value,
	gfc_conv_array_index_ref, gfc_trans_array_bound_check,
	gfc_conv_array_index_offset, gfc_conv_scalarized_array_ref,
	gfc_conv_array_ref, gfc_trans_preloop_setup, gfc_conv_ss_startstride,
	gfc_conv_loop_setup, gfc_array_init_size, gfc_trans_array_bounds,
	gfc_trans_auto_array_allocation, gfc_trans_dummy_array_bias,
	gfc_conv_expr_descriptor): Likewise.
	* trans-expr.c (gfc_conv_powi, gfc_conv_string_tmp,
	gfc_conv_concat_op, gfc_conv_expr_op): Likewise.
	* trans-intrinsic.c (build_round_expr, gfc_conv_intrinsic_bound,
	gfc_conv_intrinsic_cmplx, gfc_conv_intrinsic_sign,
	gfc_conv_intrinsic_minmaxloc, gfc_conv_intrinsic_minmaxval,
	gfc_conv_intrinsic_btest, gfc_conv_intrinsic_bitop,
	gfc_conv_intrinsic_singlebitop, gfc_conv_intrinsic_ibits,
	gfc_conv_intrinsic_ishft, gfc_conv_intrinsic_ishftc,
	gfc_conv_intrinsic_merge, prepare_arg_info,
	gfc_conv_intrinsic_rrspacing, gfc_conv_intrinsic_repeat): Likewise.
	* trans-stmt.c (gfc_trans_simple_do, gfc_trans_do, gfc_trans_do_while,
	gfc_trans_forall_loop, gfc_do_allocate, generate_loop_for_temp_to_lhs,
	generate_loop_for_rhs_to_temp, compute_inner_temp_size,
	allocate_temp_for_forall_nest, gfc_trans_pointer_assign_need_temp,
	gfc_trans_forall_1, gfc_evaluate_where_mask, gfc_trans_where_assign):
	Likewise.
	* trans-types.c (gfc_get_dtype, gfc_get_array_type_bounds): Likewise.
	* trans.c (gfc_add_modify_expr): Likewise.

From-SVN: r96926
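
The change is mechanical: every place that built a tree node with buildN and then folded it in a separate call now uses the combined fold_buildN entry point, which folds while the node is being built and so avoids keeping an unfolded intermediate node around. A minimal sketch of the pattern (illustrative only; the operands are ones that appear in the hunks below):

    /* Before: build the node, then fold it in a second call.  */
    tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type, index, tmp));

    /* After: fold_build2 builds and folds in one step.  */
    tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, index, tmp);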
convert.c
@@ -81,7 +81,7 @@ convert (tree type, tree expr)
     return expr;
   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (expr)))
-    return fold (build1 (NOP_EXPR, type, expr));
+    return fold_build1 (NOP_EXPR, type, expr);
   if (TREE_CODE (TREE_TYPE (expr)) == ERROR_MARK)
     return error_mark_node;
   if (TREE_CODE (TREE_TYPE (expr)) == VOID_TYPE)
@@ -106,9 +106,9 @@ convert (tree type, tree expr)
       /* If we have a NOP_EXPR, we must fold it here to avoid
          infinite recursion between fold () and convert (). */
       if (TREE_CODE (e) == NOP_EXPR)
-        return fold (build1 (NOP_EXPR, type, TREE_OPERAND (e, 0)));
+        return fold_build1 (NOP_EXPR, type, TREE_OPERAND (e, 0));
       else
-        return fold (build1 (NOP_EXPR, type, e));
+        return fold_build1 (NOP_EXPR, type, e);
     }
   if (code == POINTER_TYPE || code == REFERENCE_TYPE)
     return fold (convert_to_pointer (type, e));
trans-array.c
@@ -429,8 +429,8 @@ gfc_trans_allocate_array_storage (gfc_loopinfo * loop, gfc_ss_info * info,
   if (onstack)
     {
       /* Make a temporary variable to hold the data. */
-      tmp = fold (build2 (MINUS_EXPR, TREE_TYPE (nelem), nelem,
-                          integer_one_node));
+      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (nelem), nelem,
+                         integer_one_node);
       tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
                               tmp);
       tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
@@ -508,8 +508,8 @@ gfc_trans_allocate_temp_array (gfc_loopinfo * loop, gfc_ss_info * info,
     {
       /* Callee allocated arrays may not have a known bound yet. */
       if (loop->to[n])
-        loop->to[n] = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                                    loop->to[n], loop->from[n]));
+        loop->to[n] = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                                   loop->to[n], loop->from[n]);
       loop->from[n] = gfc_index_zero_node;
     }
@@ -569,18 +569,18 @@ gfc_trans_allocate_temp_array (gfc_loopinfo * loop, gfc_ss_info * info,
       tmp = gfc_conv_descriptor_ubound (desc, gfc_rank_cst[n]);
       gfc_add_modify_expr (&loop->pre, tmp, loop->to[n]);
-      tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                          loop->to[n], gfc_index_one_node));
-      size = fold (build2 (MULT_EXPR, gfc_array_index_type, size, tmp));
+      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                         loop->to[n], gfc_index_one_node);
+      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
       size = gfc_evaluate_now (size, &loop->pre);
     }
   /* Get the size of the array. */
   nelem = size;
   if (size)
-    size = fold (build2 (MULT_EXPR, gfc_array_index_type, size,
-                         TYPE_SIZE_UNIT (gfc_get_element_type (type))));
+    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
+                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
   gfc_trans_allocate_array_storage (loop, info, size, nelem);
@@ -765,8 +765,8 @@ gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
          gfc_trans_array_ctor_element (&body, pointer, *poffset, &se,
                                        c->expr);
-         *poffset = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                                  *poffset, gfc_index_one_node));
+         *poffset = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                                 *poffset, gfc_index_one_node);
        }
       else
        {
@@ -832,8 +832,8 @@ gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
                          tmp);
          gfc_add_expr_to_block (&body, tmp);
-         *poffset = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                                  *poffset, bound));
+         *poffset = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                                 *poffset, bound);
        }
       if (!INTEGER_CST_P (*poffset))
        {
@@ -1399,9 +1399,9 @@ gfc_conv_array_index_ref (gfc_se * se, tree pointer, tree * indices,
     {
       /* index = index + stride[n]*indices[n] */
       tmp = gfc_conv_array_stride (se->expr, n);
-      tmp = fold (build2 (MULT_EXPR, gfc_array_index_type, indices[n], tmp));
-      index = fold (build2 (PLUS_EXPR, gfc_array_index_type, index, tmp));
+      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, indices[n], tmp);
+      index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index, tmp);
     }
   /* Result = data[index]. */
@@ -1429,11 +1429,11 @@ gfc_trans_array_bound_check (gfc_se * se, tree descriptor, tree index, int n)
   index = gfc_evaluate_now (index, &se->pre);
   /* Check lower bound. */
   tmp = gfc_conv_array_lbound (descriptor, n);
-  fault = fold (build2 (LT_EXPR, boolean_type_node, index, tmp));
+  fault = fold_build2 (LT_EXPR, boolean_type_node, index, tmp);
   /* Check upper bound. */
   tmp = gfc_conv_array_ubound (descriptor, n);
-  cond = fold (build2 (GT_EXPR, boolean_type_node, index, tmp));
-  fault = fold (build2 (TRUTH_OR_EXPR, boolean_type_node, fault, cond));
+  cond = fold_build2 (GT_EXPR, boolean_type_node, index, tmp);
+  fault = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, fault, cond);
   gfc_trans_runtime_check (fault, gfc_strconst_fault, &se->pre);
@@ -1528,10 +1528,10 @@ gfc_conv_array_index_offset (gfc_se * se, gfc_ss_info * info, int dim, int i,
       /* Multiply the loop variable by the stride and delta. */
       index = se->loop->loopvar[i];
-      index = fold (build2 (MULT_EXPR, gfc_array_index_type, index,
-                            info->stride[i]));
-      index = fold (build2 (PLUS_EXPR, gfc_array_index_type, index,
-                            info->delta[i]));
+      index = fold_build2 (MULT_EXPR, gfc_array_index_type, index,
+                           info->stride[i]);
+      index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index,
+                           info->delta[i]);
       if (ar->dimen_type[dim] == DIMEN_VECTOR)
        {
@@ -1552,12 +1552,12 @@ gfc_conv_array_index_offset (gfc_se * se, gfc_ss_info * info, int dim, int i,
          gcc_assert (se->loop);
          index = se->loop->loopvar[se->loop->order[i]];
          if (!integer_zerop (info->delta[i]))
-           index = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                                 index, info->delta[i]));
+           index = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                                index, info->delta[i]);
        }
      /* Multiply by the stride. */
-     index = fold (build2 (MULT_EXPR, gfc_array_index_type, index, stride));
+     index = fold_build2 (MULT_EXPR, gfc_array_index_type, index, stride);
     return index;
 }
@@ -1583,7 +1583,7 @@ gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
                                       info->stride0);
   /* Add the offset for this dimension to the stored offset for all other
      dimensions. */
-  index = fold (build2 (PLUS_EXPR, gfc_array_index_type, index, info->offset));
+  index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index, info->offset);
   tmp = gfc_build_indirect_ref (info->data);
   se->expr = gfc_build_array_ref (tmp, index);
@@ -1643,25 +1643,25 @@ gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar)
          indexse.expr = gfc_evaluate_now (indexse.expr, &se->pre);
          tmp = gfc_conv_array_lbound (se->expr, n);
-         cond = fold (build2 (LT_EXPR, boolean_type_node,
-                              indexse.expr, tmp));
+         cond = fold_build2 (LT_EXPR, boolean_type_node,
+                             indexse.expr, tmp);
          fault =
-           fold (build2 (TRUTH_OR_EXPR, boolean_type_node, fault, cond));
+           fold_build2 (TRUTH_OR_EXPR, boolean_type_node, fault, cond);
          tmp = gfc_conv_array_ubound (se->expr, n);
-         cond = fold (build2 (GT_EXPR, boolean_type_node,
-                              indexse.expr, tmp));
+         cond = fold_build2 (GT_EXPR, boolean_type_node,
+                             indexse.expr, tmp);
          fault =
-           fold (build2 (TRUTH_OR_EXPR, boolean_type_node, fault, cond));
+           fold_build2 (TRUTH_OR_EXPR, boolean_type_node, fault, cond);
        }
       /* Multiply the index by the stride. */
       stride = gfc_conv_array_stride (se->expr, n);
-      tmp = fold (build2 (MULT_EXPR, gfc_array_index_type, indexse.expr,
-                          stride));
+      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, indexse.expr,
+                         stride);
       /* And add it to the total. */
-      index = fold (build2 (PLUS_EXPR, gfc_array_index_type, index, tmp));
+      index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index, tmp);
     }
   if (flag_bounds_check)
@@ -1669,7 +1669,7 @@ gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar)
   tmp = gfc_conv_array_offset (se->expr);
   if (!integer_zerop (tmp))
-    index = fold (build2 (PLUS_EXPR, gfc_array_index_type, index, tmp));
+    index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index, tmp);
   /* Access the calculated element. */
   tmp = gfc_conv_array_data (se->expr);
@@ -1730,8 +1730,8 @@ gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
                                             stride);
       gfc_add_block_to_block (pblock, &se.pre);
-      info->offset = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                                   info->offset, index));
+      info->offset = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                                  info->offset, index);
       info->offset = gfc_evaluate_now (info->offset, pblock);
     }
@@ -1769,8 +1769,8 @@ gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
          index = gfc_conv_array_index_offset (&se, info, info->dim[i], i,
                                               ar, stride);
          gfc_add_block_to_block (pblock, &se.pre);
-         info->offset = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                                      info->offset, index));
+         info->offset = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                                     info->offset, index);
          info->offset = gfc_evaluate_now (info->offset, pblock);
        }
@@ -2168,28 +2168,28 @@ gfc_conv_ss_startstride (gfc_loopinfo * loop)
          /* Check lower bound. */
          bound = gfc_conv_array_lbound (desc, dim);
          tmp = info->start[n];
-         tmp = fold (build2 (LT_EXPR, boolean_type_node, tmp, bound));
-         fault = fold (build2 (TRUTH_OR_EXPR, boolean_type_node, fault,
-                               tmp));
+         tmp = fold_build2 (LT_EXPR, boolean_type_node, tmp, bound);
+         fault = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, fault,
+                              tmp);
          /* Check the upper bound. */
          bound = gfc_conv_array_ubound (desc, dim);
          end = gfc_conv_section_upper_bound (ss, n, &block);
-         tmp = fold (build2 (GT_EXPR, boolean_type_node, end, bound));
-         fault = fold (build2 (TRUTH_OR_EXPR, boolean_type_node, fault,
-                               tmp));
+         tmp = fold_build2 (GT_EXPR, boolean_type_node, end, bound);
+         fault = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, fault,
+                              tmp);
          /* Check the section sizes match. */
-         tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type, end,
-                             info->start[n]));
-         tmp = fold (build2 (FLOOR_DIV_EXPR, gfc_array_index_type, tmp,
-                             info->stride[n]));
+         tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, end,
+                            info->start[n]);
+         tmp = fold_build2 (FLOOR_DIV_EXPR, gfc_array_index_type, tmp,
+                            info->stride[n]);
          /* We remember the size of the first section, and check all the
             others against this. */
          if (size[n])
            {
              tmp =
-               fold (build2 (NE_EXPR, boolean_type_node, tmp, size[n]));
+               fold_build2 (NE_EXPR, boolean_type_node, tmp, size[n]);
              fault =
                build2 (TRUTH_OR_EXPR, boolean_type_node, fault, tmp);
            }
@@ -2467,10 +2467,10 @@ gfc_conv_loop_setup (gfc_loopinfo * loop)
              /* To = from + (size - 1) * stride. */
              tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
              if (!integer_onep (info->stride[n]))
-               tmp = fold (build2 (MULT_EXPR, gfc_array_index_type,
-                                   tmp, info->stride[n]));
-             loop->to[n] = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                                         loop->from[n], tmp));
+               tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
+                                  tmp, info->stride[n]);
+             loop->to[n] = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                                        loop->from[n], tmp);
            }
          else
            {
@@ -2508,10 +2508,10 @@ gfc_conv_loop_setup (gfc_loopinfo * loop)
             with start = 0, this simplifies to
             last = end / step;
             for (i = 0; i<=last; i++){...}; */
-         tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                             loop->to[n], loop->from[n]));
-         tmp = fold (build2 (TRUNC_DIV_EXPR, gfc_array_index_type,
-                             tmp, info->stride[n]));
+         tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                            loop->to[n], loop->from[n]);
+         tmp = fold_build2 (TRUNC_DIV_EXPR, gfc_array_index_type,
+                            tmp, info->stride[n]);
          loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
          /* Make the loop variable start at 0. */
          loop->from[n] = gfc_index_zero_node;
@@ -2563,12 +2563,12 @@ gfc_conv_loop_setup (gfc_loopinfo * loop)
        {
          /* Calculate the offset relative to the loop variable.
             First multiply by the stride. */
-         tmp = fold (build2 (MULT_EXPR, gfc_array_index_type,
-                             loop->from[n], info->stride[n]));
+         tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
+                            loop->from[n], info->stride[n]);
          /* Then subtract this from our starting value. */
-         tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                             info->start[n], tmp));
+         tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                            info->start[n], tmp);
          info->delta[n] = gfc_evaluate_now (tmp, &loop->pre);
        }
@@ -2650,8 +2650,8 @@ gfc_array_init_size (tree descriptor, int rank, tree * poffset,
       gfc_add_modify_expr (pblock, tmp, se.expr);
       /* Work out the offset for this component. */
-      tmp = fold (build2 (MULT_EXPR, gfc_array_index_type, se.expr, stride));
-      offset = fold (build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp));
+      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, se.expr, stride);
+      offset = fold_build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp);
       /* Start the calculation for the size of this dimension. */
       size = build2 (MINUS_EXPR, gfc_array_index_type,
@@ -2671,17 +2671,17 @@ gfc_array_init_size (tree descriptor, int rank, tree * poffset,
       gfc_add_modify_expr (pblock, tmp, stride);
       /* Calculate the size of this dimension. */
-      size = fold (build2 (PLUS_EXPR, gfc_array_index_type, se.expr, size));
+      size = fold_build2 (PLUS_EXPR, gfc_array_index_type, se.expr, size);
       /* Multiply the stride by the number of elements in this dimension. */
-      stride = fold (build2 (MULT_EXPR, gfc_array_index_type, stride, size));
+      stride = fold_build2 (MULT_EXPR, gfc_array_index_type, stride, size);
       stride = gfc_evaluate_now (stride, pblock);
     }
   /* The stride is the number of elements in the array, so multiply by the
      size of an element to get the total size. */
   tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
-  size = fold (build2 (MULT_EXPR, gfc_array_index_type, stride, tmp));
+  size = fold_build2 (MULT_EXPR, gfc_array_index_type, stride, tmp);
   if (poffset != NULL)
     {
@@ -2957,8 +2957,8 @@ gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
          gfc_add_modify_expr (pblock, ubound, se.expr);
        }
       /* The offset of this dimension. offset = offset - lbound * stride. */
-      tmp = fold (build2 (MULT_EXPR, gfc_array_index_type, lbound, size));
-      offset = fold (build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp));
+      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, lbound, size);
+      offset = fold_build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp);
       /* The size of this dimension, and the stride of the next. */
       if (dim + 1 < as->rank)
@@ -2969,10 +2969,10 @@ gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
          if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
            {
              /* Calculate stride = size * (ubound + 1 - lbound). */
-             tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                                 gfc_index_one_node, lbound));
-             tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type, ubound, tmp));
-             tmp = fold (build2 (MULT_EXPR, gfc_array_index_type, size, tmp));
+             tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                                gfc_index_one_node, lbound);
+             tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, ubound, tmp);
+             tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
              if (stride)
                gfc_add_modify_expr (pblock, stride, tmp);
              else
@@ -3045,7 +3045,7 @@ gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym, tree fnbody)
   /* The size is the number of elements in the array, so multiply by the
      size of an element to get the total size. */
   tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
-  size = fold (build2 (MULT_EXPR, gfc_array_index_type, size, tmp));
+  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
   /* Allocate memory to hold the data. */
   tmp = gfc_chainon_list (NULL_TREE, size);
@@ -3203,7 +3203,7 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
       partial = gfc_create_var (boolean_type_node, "partial");
       TREE_USED (partial) = 1;
       tmp = gfc_conv_descriptor_stride (dumdesc, gfc_rank_cst[0]);
-      tmp = fold (build2 (EQ_EXPR, boolean_type_node, tmp, integer_one_node));
+      tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, integer_one_node);
       gfc_add_modify_expr (&block, partial, tmp);
     }
   else
@@ -3304,11 +3304,11 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
            {
              /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
-             tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                                 ubound, lbound));
+             tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                                ubound, lbound);
              stride = build2 (MINUS_EXPR, gfc_array_index_type,
                               dubound, dlbound);
-             tmp = fold (build2 (NE_EXPR, gfc_array_index_type, tmp, stride));
+             tmp = fold_build2 (NE_EXPR, gfc_array_index_type, tmp, stride);
              gfc_trans_runtime_check (tmp, gfc_strconst_bounds, &block);
            }
        }
@@ -3317,12 +3317,12 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
          /* For assumed shape arrays move the upper bound by the same amount
             as the lower bound. */
          tmp = build2 (MINUS_EXPR, gfc_array_index_type, dubound, dlbound);
-         tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type, tmp, lbound));
+         tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, tmp, lbound);
          gfc_add_modify_expr (&block, ubound, tmp);
        }
      /* The offset of this dimension. offset = offset - lbound * stride. */
-     tmp = fold (build2 (MULT_EXPR, gfc_array_index_type, lbound, stride));
-     offset = fold (build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp));
+     tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, lbound, stride);
+     offset = fold_build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp);
      /* The size of this dimension, and the stride of the next. */
      if (n + 1 < sym->as->rank)
@@ -3343,12 +3343,12 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
          else
            {
              /* Calculate stride = size * (ubound + 1 - lbound). */
-             tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                                 gfc_index_one_node, lbound));
-             tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                                 ubound, tmp));
-             size = fold (build2 (MULT_EXPR, gfc_array_index_type,
-                                  size, tmp));
+             tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                                gfc_index_one_node, lbound);
+             tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                                ubound, tmp);
+             size = fold_build2 (MULT_EXPR, gfc_array_index_type,
+                                 size, tmp);
              stmt_packed = size;
            }
@@ -3759,10 +3759,10 @@ gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
        }
      tmp = gfc_conv_array_lbound (desc, n);
-     tmp = fold (build2 (MINUS_EXPR, TREE_TYPE (tmp), start, tmp));
-     tmp = fold (build2 (MULT_EXPR, TREE_TYPE (tmp), tmp, stride));
-     offset = fold (build2 (PLUS_EXPR, TREE_TYPE (tmp), offset, tmp));
+     tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (tmp), start, tmp);
+     tmp = fold_build2 (MULT_EXPR, TREE_TYPE (tmp), tmp, stride);
+     offset = fold_build2 (PLUS_EXPR, TREE_TYPE (tmp), offset, tmp);
      if (info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
        {
@@ -3779,9 +3779,9 @@ gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
          if (!integer_onep (from))
            {
              /* Make sure the new section starts at 1. */
-             tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                                 gfc_index_one_node, from));
-             to = fold (build2 (PLUS_EXPR, gfc_array_index_type, to, tmp));
+             tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                                gfc_index_one_node, from);
+             to = fold_build2 (PLUS_EXPR, gfc_array_index_type, to, tmp);
              from = gfc_index_one_node;
            }
          tmp = gfc_conv_descriptor_lbound (parm, gfc_rank_cst[dim]);
@@ -3793,12 +3793,12 @@ gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
          /* Multiply the stride by the section stride to get the
             total stride. */
-         stride = fold (build2 (MULT_EXPR, gfc_array_index_type,
-                                stride, info->stride[dim]));
+         stride = fold_build2 (MULT_EXPR, gfc_array_index_type,
+                               stride, info->stride[dim]);
          if (se->direct_byref)
-           base = fold (build2 (MINUS_EXPR, TREE_TYPE (base),
-                                base, stride));
+           base = fold_build2 (MINUS_EXPR, TREE_TYPE (base),
+                               base, stride);
          /* Store the new stride. */
          tmp = gfc_conv_descriptor_stride (parm, gfc_rank_cst[dim]);
trans-expr.c
@@ -513,7 +513,7 @@ gfc_conv_powi (gfc_se * se, int n, tree * tmpvar)
       op1 = op0;
     }
-  tmp = fold (build2 (MULT_EXPR, TREE_TYPE (op0), op0, op1));
+  tmp = fold_build2 (MULT_EXPR, TREE_TYPE (op0), op0, op1);
   tmp = gfc_evaluate_now (tmp, &se->pre);
   if (n < POWI_TABLE_SIZE)
@@ -738,9 +738,8 @@ gfc_conv_string_tmp (gfc_se * se, tree type, tree len)
   if (gfc_can_put_var_on_stack (len))
     {
       /* Create a temporary variable to hold the result. */
-      tmp = fold (build2 (MINUS_EXPR, gfc_charlen_type_node, len,
-                          convert (gfc_charlen_type_node,
-                                   integer_one_node)));
+      tmp = fold_build2 (MINUS_EXPR, gfc_charlen_type_node, len,
+                         convert (gfc_charlen_type_node, integer_one_node));
       tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
       tmp = build_array_type (gfc_character1_type_node, tmp);
       var = gfc_create_var (tmp, "str");
@@ -797,8 +796,8 @@ gfc_conv_concat_op (gfc_se * se, gfc_expr * expr)
   len = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
   if (len == NULL_TREE)
     {
-      len = fold (build2 (PLUS_EXPR, TREE_TYPE (lse.string_length),
-                          lse.string_length, rse.string_length));
+      len = fold_build2 (PLUS_EXPR, TREE_TYPE (lse.string_length),
+                         lse.string_length, rse.string_length);
     }
   type = build_pointer_type (type);
@@ -990,11 +989,11 @@ gfc_conv_expr_op (gfc_se * se, gfc_expr * expr)
   if (lop)
     {
       /* The result of logical ops is always boolean_type_node. */
-      tmp = fold (build2 (code, type, lse.expr, rse.expr));
+      tmp = fold_build2 (code, type, lse.expr, rse.expr);
       se->expr = convert (type, tmp);
     }
   else
-    se->expr = fold (build2 (code, type, lse.expr, rse.expr));
+    se->expr = fold_build2 (code, type, lse.expr, rse.expr);
   /* Add the post blocks. */
   gfc_add_block_to_block (&se->post, &rse.post);
trans-intrinsic.c
@@ -264,11 +264,11 @@ build_round_expr (stmtblock_t * pblock, tree arg, tree type)
   neg = build_real (argtype, r);
   tmp = gfc_build_const (argtype, integer_zero_node);
-  cond = fold (build2 (GT_EXPR, boolean_type_node, arg, tmp));
-  tmp = fold (build3 (COND_EXPR, argtype, cond, pos, neg));
-  tmp = fold (build2 (PLUS_EXPR, argtype, arg, tmp));
-  return fold (build1 (FIX_TRUNC_EXPR, type, tmp));
+  cond = fold_build2 (GT_EXPR, boolean_type_node, arg, tmp);
+  tmp = fold_build3 (COND_EXPR, argtype, cond, pos, neg);
+  tmp = fold_build2 (PLUS_EXPR, argtype, arg, tmp);
+  return fold_build1 (FIX_TRUNC_EXPR, type, tmp);
 }
@@ -645,8 +645,8 @@ gfc_conv_intrinsic_bound (gfc_se * se, gfc_expr * expr, int upper)
       gcc_assert (se->ss->expr == expr);
       gfc_advance_se_ss_chain (se);
       bound = se->loop->loopvar[0];
-      bound = fold (build2 (MINUS_EXPR, gfc_array_index_type, bound,
-                            se->loop->from[0]));
+      bound = fold_build2 (MINUS_EXPR, gfc_array_index_type, bound,
+                           se->loop->from[0]);
     }
   else
     {
@@ -657,8 +657,8 @@ gfc_conv_intrinsic_bound (gfc_se * se, gfc_expr * expr, int upper)
       gfc_add_block_to_block (&se->pre, &argse.pre);
       bound = argse.expr;
       /* Convert from one based to zero based. */
-      bound = fold (build2 (MINUS_EXPR, gfc_array_index_type, bound,
-                            gfc_index_one_node));
+      bound = fold_build2 (MINUS_EXPR, gfc_array_index_type, bound,
+                           gfc_index_one_node);
     }
   /* TODO: don't re-evaluate the descriptor on each iteration. */
@@ -683,11 +683,11 @@ gfc_conv_intrinsic_bound (gfc_se * se, gfc_expr * expr, int upper)
       if (flag_bounds_check)
        {
          bound = gfc_evaluate_now (bound, &se->pre);
-         cond = fold (build2 (LT_EXPR, boolean_type_node,
-                              bound, build_int_cst (TREE_TYPE (bound), 0)));
+         cond = fold_build2 (LT_EXPR, boolean_type_node,
+                             bound, build_int_cst (TREE_TYPE (bound), 0));
          tmp = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (TREE_TYPE (desc))];
-         tmp = fold (build2 (GE_EXPR, boolean_type_node, bound, tmp));
-         cond = fold(build2 (TRUTH_ORIF_EXPR, boolean_type_node, cond, tmp));
+         tmp = fold_build2 (GE_EXPR, boolean_type_node, bound, tmp);
+         cond = fold_build2 (TRUTH_ORIF_EXPR, boolean_type_node, cond, tmp);
          gfc_trans_runtime_check (cond, gfc_strconst_fault, &se->pre);
        }
     }
@@ -765,7 +765,7 @@ gfc_conv_intrinsic_cmplx (gfc_se * se, gfc_expr * expr, int both)
   else
     imag = build_real_from_int_cst (TREE_TYPE (type), integer_zero_node);
-  se->expr = fold (build2 (COMPLEX_EXPR, type, real, imag));
+  se->expr = fold_build2 (COMPLEX_EXPR, type, real, imag);
 }
 /* Remainder function MOD(A, P) = A - INT(A / P) * P
@@ -903,11 +903,11 @@ gfc_conv_intrinsic_sign (gfc_se * se, gfc_expr * expr)
   type = TREE_TYPE (arg);
   zero = gfc_build_const (type, integer_zero_node);
-  testa = fold (build2 (GE_EXPR, boolean_type_node, arg, zero));
-  testb = fold (build2 (GE_EXPR, boolean_type_node, arg2, zero));
-  tmp = fold (build2 (TRUTH_XOR_EXPR, boolean_type_node, testa, testb));
-  se->expr = fold (build3 (COND_EXPR, type, tmp,
-                           build1 (NEGATE_EXPR, type, arg), arg));
+  testa = fold_build2 (GE_EXPR, boolean_type_node, arg, zero);
+  testb = fold_build2 (GE_EXPR, boolean_type_node, arg2, zero);
+  tmp = fold_build2 (TRUTH_XOR_EXPR, boolean_type_node, testa, testb);
+  se->expr = fold_build3 (COND_EXPR, type, tmp,
+                          build1 (NEGATE_EXPR, type, arg), arg);
 }
@@ -1433,7 +1433,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, int op)
   /* Most negative(+HUGE) for maxval, most negative (-HUGE) for minval. */
   if (op == GT_EXPR)
-    tmp = fold (build1 (NEGATE_EXPR, TREE_TYPE (tmp), tmp));
+    tmp = fold_build1 (NEGATE_EXPR, TREE_TYPE (tmp), tmp);
   gfc_add_modify_expr (&se->pre, limit, tmp);
   /* Initialize the scalarizer. */
@@ -1452,12 +1452,12 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, int op)
      size we need to return zero.  Otherwise use the first element of the
      array, in case all elements are equal to the limit.
      i.e. pos = (ubound >= lbound) ? lbound, lbound - 1; */
-  tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                      loop.from[0], gfc_index_one_node));
-  cond = fold (build2 (GE_EXPR, boolean_type_node,
-                       loop.to[0], loop.from[0]));
-  tmp = fold (build3 (COND_EXPR, gfc_array_index_type, cond,
-                      loop.from[0], tmp));
+  tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                     loop.from[0], gfc_index_one_node);
+  cond = fold_build2 (GE_EXPR, boolean_type_node,
+                      loop.to[0], loop.from[0]);
+  tmp = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
+                     loop.from[0], tmp);
   gfc_add_modify_expr (&loop.pre, pos, tmp);
   gfc_mark_ss_chain_used (arrayss, 1);
@@ -1521,9 +1521,9 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, int op)
   gfc_cleanup_loop (&loop);
   /* Return a value in the range 1..SIZE(array). */
-  tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type, loop.from[0],
-                      gfc_index_one_node));
-  tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type, pos, tmp));
+  tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, loop.from[0],
+                     gfc_index_one_node);
+  tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, pos, tmp);
   /* And convert to the required type. */
   se->expr = convert (type, tmp);
 }
@@ -1573,7 +1573,7 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, int op)
   /* Most negative(-HUGE) for maxval, most positive (-HUGE) for minval. */
   if (op == GT_EXPR)
-    tmp = fold (build1 (NEGATE_EXPR, TREE_TYPE (tmp), tmp));
+    tmp = fold_build1 (NEGATE_EXPR, TREE_TYPE (tmp), tmp);
   gfc_add_modify_expr (&se->pre, limit, tmp);
   /* Walk the arguments. */
@@ -1670,8 +1670,8 @@ gfc_conv_intrinsic_btest (gfc_se * se, gfc_expr * expr)
   tmp = build2 (LSHIFT_EXPR, type, build_int_cst (type, 1), arg2);
   tmp = build2 (BIT_AND_EXPR, type, arg, tmp);
-  tmp = fold (build2 (NE_EXPR, boolean_type_node, tmp,
-                      build_int_cst (type, 0)));
+  tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
+                     build_int_cst (type, 0));
   type = gfc_typenode_for_spec (&expr->ts);
   se->expr = convert (type, tmp);
 }
@@ -1689,7 +1689,7 @@ gfc_conv_intrinsic_bitop (gfc_se * se, gfc_expr * expr, int op)
   arg = TREE_VALUE (arg);
   type = TREE_TYPE (arg);
-  se->expr = fold (build2 (op, type, arg, arg2));
+  se->expr = fold_build2 (op, type, arg, arg2);
 }
 /* Bitwise not. */
@@ -1719,15 +1719,15 @@ gfc_conv_intrinsic_singlebitop (gfc_se * se, gfc_expr * expr, int set)
   arg = TREE_VALUE (arg);
   type = TREE_TYPE (arg);
-  tmp = fold (build2 (LSHIFT_EXPR, type, build_int_cst (type, 1), arg2));
+  tmp = fold_build2 (LSHIFT_EXPR, type, build_int_cst (type, 1), arg2);
   if (set)
     op = BIT_IOR_EXPR;
   else
     {
      op = BIT_AND_EXPR;
-     tmp = fold (build1 (BIT_NOT_EXPR, type, tmp));
+     tmp = fold_build1 (BIT_NOT_EXPR, type, tmp);
    }
-  se->expr = fold (build2 (op, type, arg, tmp));
+  se->expr = fold_build2 (op, type, arg, tmp);
 }
 /* Extract a sequence of bits.
@@ -1755,7 +1755,7 @@ gfc_conv_intrinsic_ibits (gfc_se * se, gfc_expr * expr)
   tmp = build2 (RSHIFT_EXPR, type, arg, arg2);
-  se->expr = fold (build2 (BIT_AND_EXPR, type, tmp, mask));
+  se->expr = fold_build2 (BIT_AND_EXPR, type, tmp, mask);
 }
 /* ISHFT (I, SHIFT) = (abs (shift) >= BIT_SIZE (i))
@@ -1782,10 +1782,10 @@ gfc_conv_intrinsic_ishft (gfc_se * se, gfc_expr * expr)
   type = TREE_TYPE (arg);
   utype = gfc_unsigned_type (type);
-  width = fold (build1 (ABS_EXPR, TREE_TYPE (arg2), arg2));
+  width = fold_build1 (ABS_EXPR, TREE_TYPE (arg2), arg2);
   /* Left shift if positive. */
-  lshift = fold (build2 (LSHIFT_EXPR, type, arg, width));
+  lshift = fold_build2 (LSHIFT_EXPR, type, arg, width);
   /* Right shift if negative.
      We convert to an unsigned type because we want a logical shift.
@@ -1795,18 +1795,18 @@ gfc_conv_intrinsic_ishft (gfc_se * se, gfc_expr * expr)
   rshift = fold_convert (type, build2 (RSHIFT_EXPR, utype,
                                        convert (utype, arg), width));
-  tmp = fold (build2 (GE_EXPR, boolean_type_node, arg2,
-                      build_int_cst (TREE_TYPE (arg2), 0)));
-  tmp = fold (build3 (COND_EXPR, type, tmp, lshift, rshift));
+  tmp = fold_build2 (GE_EXPR, boolean_type_node, arg2,
+                     build_int_cst (TREE_TYPE (arg2), 0));
+  tmp = fold_build3 (COND_EXPR, type, tmp, lshift, rshift);
   /* The Fortran standard allows shift widths <= BIT_SIZE(I), whereas
      gcc requires a shift width < BIT_SIZE(I), so we have to catch this
      special case. */
   num_bits = build_int_cst (TREE_TYPE (arg2), TYPE_PRECISION (type));
-  cond = fold (build2 (GE_EXPR, boolean_type_node, width, num_bits));
-  se->expr = fold (build3 (COND_EXPR, type, cond,
-                           build_int_cst (type, 0), tmp));
+  cond = fold_build2 (GE_EXPR, boolean_type_node, width, num_bits);
+  se->expr = fold_build3 (COND_EXPR, type, cond,
+                          build_int_cst (type, 0), tmp);
 }
 /* Circular shift.  AKA rotate or barrel shift. */
@@ -1872,19 +1872,19 @@ gfc_conv_intrinsic_ishftc (gfc_se * se, gfc_expr * expr)
   type = TREE_TYPE (arg);
   /* Rotate left if positive. */
-  lrot = fold (build2 (LROTATE_EXPR, type, arg, arg2));
+  lrot = fold_build2 (LROTATE_EXPR, type, arg, arg2);
   /* Rotate right if negative. */
-  tmp = fold (build1 (NEGATE_EXPR, TREE_TYPE (arg2), arg2));
-  rrot = fold (build2 (RROTATE_EXPR, type, arg, tmp));
+  tmp = fold_build1 (NEGATE_EXPR, TREE_TYPE (arg2), arg2);
+  rrot = fold_build2 (RROTATE_EXPR, type, arg, tmp);
   zero = build_int_cst (TREE_TYPE (arg2), 0);
-  tmp = fold (build2 (GT_EXPR, boolean_type_node, arg2, zero));
-  rrot = fold (build3 (COND_EXPR, type, tmp, lrot, rrot));
+  tmp = fold_build2 (GT_EXPR, boolean_type_node, arg2, zero);
+  rrot = fold_build3 (COND_EXPR, type, tmp, lrot, rrot);
   /* Do nothing if shift == 0. */
-  tmp = fold (build2 (EQ_EXPR, boolean_type_node, arg2, zero));
-  se->expr = fold (build3 (COND_EXPR, type, tmp, arg, rrot));
+  tmp = fold_build2 (EQ_EXPR, boolean_type_node, arg2, zero);
+  se->expr = fold_build3 (COND_EXPR, type, tmp, arg, rrot);
 }
 /* The length of a character string. */
@@ -2037,7 +2037,7 @@ gfc_conv_intrinsic_merge (gfc_se * se, gfc_expr * expr)
       se->string_length = len;
     }
   type = TREE_TYPE (tsource);
-  se->expr = fold (build3 (COND_EXPR, type, mask, tsource, fsource));
+  se->expr = fold_build3 (COND_EXPR, type, mask, tsource, fsource);
 }
@@ -2374,18 +2374,18 @@ prepare_arg_info (gfc_se * se, gfc_expr * expr,
   rcs->fdigits = convert (masktype, tmp);
   wbits = build_int_cst (NULL_TREE, TYPE_PRECISION (rcs->type) - 1);
   wbits = convert (masktype, wbits);
-  rcs->edigits = fold (build2 (MINUS_EXPR, masktype, wbits, tmp));
+  rcs->edigits = fold_build2 (MINUS_EXPR, masktype, wbits, tmp);
   /* Form masks for exponent/fraction/sign */
   one = gfc_build_const (masktype, integer_one_node);
-  rcs->smask = fold (build2 (LSHIFT_EXPR, masktype, one, wbits));
-  rcs->f1 = fold (build2 (LSHIFT_EXPR, masktype, one, rcs->fdigits));
-  rcs->emask = fold (build2 (MINUS_EXPR, masktype, rcs->smask, rcs->f1));
-  rcs->fmask = fold (build2 (MINUS_EXPR, masktype, rcs->f1, one));
+  rcs->smask = fold_build2 (LSHIFT_EXPR, masktype, one, wbits);
+  rcs->f1 = fold_build2 (LSHIFT_EXPR, masktype, one, rcs->fdigits);
+  rcs->emask = fold_build2 (MINUS_EXPR, masktype, rcs->smask, rcs->f1);
+  rcs->fmask = fold_build2 (MINUS_EXPR, masktype, rcs->f1, one);
   /* Form bias. */
-  tmp = fold (build2 (MINUS_EXPR, masktype, rcs->edigits, one));
-  tmp = fold (build2 (LSHIFT_EXPR, masktype, one, tmp));
-  rcs->bias = fold (build2 (MINUS_EXPR, masktype, tmp ,one));
+  tmp = fold_build2 (MINUS_EXPR, masktype, rcs->edigits, one);
+  tmp = fold_build2 (LSHIFT_EXPR, masktype, one, tmp);
+  rcs->bias = fold_build2 (MINUS_EXPR, masktype, tmp ,one);
   if (all)
     {
@@ -2510,7 +2510,7 @@ gfc_conv_intrinsic_rrspacing (gfc_se * se, gfc_expr * expr)
   fraction = rcs.frac;
   one = gfc_build_const (masktype, integer_one_node);
   zero = gfc_build_const (masktype, integer_zero_node);
-  t2 = fold (build2 (PLUS_EXPR, masktype, rcs.edigits, one));
+  t2 = fold_build2 (PLUS_EXPR, masktype, rcs.edigits, one);
   t1 = call_builtin_clz (masktype, fraction);
   tmp = build2 (PLUS_EXPR, masktype, t1, one);
@@ -2519,8 +2519,8 @@ gfc_conv_intrinsic_rrspacing (gfc_se * se, gfc_expr * expr)
   cond = build2 (EQ_EXPR, boolean_type_node, rcs.expn, zero);
   fraction = build3 (COND_EXPR, masktype, cond, tmp, fraction);
-  tmp = fold (build2 (PLUS_EXPR, masktype, rcs.bias, fdigits));
-  tmp = fold (build2 (LSHIFT_EXPR, masktype, tmp, fdigits));
+  tmp = fold_build2 (PLUS_EXPR, masktype, rcs.bias, fdigits);
+  tmp = fold_build2 (LSHIFT_EXPR, masktype, tmp, fdigits);
   tmp = build2 (BIT_IOR_EXPR, masktype, tmp, fraction);
   cond2 = build2 (EQ_EXPR, boolean_type_node, rcs.frac, zero);
@@ -2634,7 +2634,7 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
   len = TREE_VALUE (args);
   tmp = gfc_advance_chain (args, 2);
   ncopies = TREE_VALUE (tmp);
-  len = fold (build2 (MULT_EXPR, gfc_int4_type_node, len, ncopies));
+  len = fold_build2 (MULT_EXPR, gfc_int4_type_node, len, ncopies);
   type = gfc_get_character_type (expr->ts.kind, expr->ts.cl);
   var = gfc_conv_string_tmp (se, build_pointer_type (type), len);
......
...@@ -587,9 +587,9 @@ gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar, ...@@ -587,9 +587,9 @@ gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
/* Only execute the loop if the number of iterations is positive. */ /* Only execute the loop if the number of iterations is positive. */
if (tree_int_cst_sgn (step) > 0) if (tree_int_cst_sgn (step) > 0)
cond = fold (build2 (LE_EXPR, boolean_type_node, dovar, to)); cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
else else
cond = fold (build2 (GE_EXPR, boolean_type_node, dovar, to)); cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt ()); tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt ());
gfc_add_expr_to_block (pblock, tmp); gfc_add_expr_to_block (pblock, tmp);
...@@ -685,11 +685,11 @@ gfc_trans_do (gfc_code * code) ...@@ -685,11 +685,11 @@ gfc_trans_do (gfc_code * code)
/* Initialize loop count. This code is executed before we enter the /* Initialize loop count. This code is executed before we enter the
loop body. We generate: count = (to + step - from) / step. */ loop body. We generate: count = (to + step - from) / step. */
tmp = fold (build2 (MINUS_EXPR, type, step, from)); tmp = fold_build2 (MINUS_EXPR, type, step, from);
tmp = fold (build2 (PLUS_EXPR, type, to, tmp)); tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
if (TREE_CODE (type) == INTEGER_TYPE) if (TREE_CODE (type) == INTEGER_TYPE)
{ {
tmp = fold (build2 (TRUNC_DIV_EXPR, type, tmp, step)); tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
count = gfc_create_var (type, "count"); count = gfc_create_var (type, "count");
} }
else else
...@@ -697,8 +697,8 @@ gfc_trans_do (gfc_code * code) ...@@ -697,8 +697,8 @@ gfc_trans_do (gfc_code * code)
/* TODO: We could use the same width as the real type. /* TODO: We could use the same width as the real type.
This would probably cause more problems that it solves This would probably cause more problems that it solves
when we implement "long double" types. */ when we implement "long double" types. */
tmp = fold (build2 (RDIV_EXPR, type, tmp, step)); tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
tmp = fold (build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp)); tmp = fold_build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp);
count = gfc_create_var (gfc_array_index_type, "count"); count = gfc_create_var (gfc_array_index_type, "count");
} }
gfc_add_modify_expr (&block, count, tmp); gfc_add_modify_expr (&block, count, tmp);
...@@ -810,7 +810,7 @@ gfc_trans_do_while (gfc_code * code) ...@@ -810,7 +810,7 @@ gfc_trans_do_while (gfc_code * code)
gfc_init_se (&cond, NULL); gfc_init_se (&cond, NULL);
gfc_conv_expr_val (&cond, code->expr); gfc_conv_expr_val (&cond, code->expr);
gfc_add_block_to_block (&block, &cond.pre); gfc_add_block_to_block (&block, &cond.pre);
cond.expr = fold (build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr)); cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
/* Build "IF (! cond) GOTO exit_label". */ /* Build "IF (! cond) GOTO exit_label". */
tmp = build1_v (GOTO_EXPR, exit_label); tmp = build1_v (GOTO_EXPR, exit_label);
...@@ -1388,9 +1388,9 @@ gfc_trans_forall_loop (forall_info *forall_tmp, int nvar, tree body, int mask_fl ...@@ -1388,9 +1388,9 @@ gfc_trans_forall_loop (forall_info *forall_tmp, int nvar, tree body, int mask_fl
gfc_add_modify_expr (&block, var, start); gfc_add_modify_expr (&block, var, start);
/* Initialize the loop counter. */ /* Initialize the loop counter. */
tmp = fold (build2 (MINUS_EXPR, TREE_TYPE (var), step, start)); tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp)); tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
tmp = fold (build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step)); tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
gfc_add_modify_expr (&block, count, tmp); gfc_add_modify_expr (&block, count, tmp);
/* The loop expression. */ /* The loop expression. */
@@ -1479,8 +1479,8 @@ gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
   if (INTEGER_CST_P (size))
     {
-      tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type, size,
-                          gfc_index_one_node));
+      tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
+                         gfc_index_one_node);
     }
   else
     tmp = NULL_TREE;
@@ -1548,7 +1548,7 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
       gfc_add_block_to_block (&block, &lse.post);

       /* Increment the count1.  */
-      tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size));
+      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size);
       gfc_add_modify_expr (&block, count1, tmp);
       tmp = gfc_finish_block (&block);
     }
@@ -1582,8 +1582,8 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
       /* Form the expression of the temporary.  */
       if (lss != gfc_ss_terminator)
         {
-          index = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                                count1, count2));
+          index = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                               count1, count2);
           rse.expr = gfc_build_array_ref (tmp1, index);
         }

       /* Translate expr.  */
@@ -1610,15 +1610,15 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
       gfc_add_expr_to_block (&body, tmp);

       /* Increment count2.  */
-      tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                          count2, gfc_index_one_node));
+      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                         count2, gfc_index_one_node);
       gfc_add_modify_expr (&body, count2, tmp);

       /* Increment count3.  */
       if (count3)
         {
-          tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                              count3, gfc_index_one_node));
+          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                             count3, gfc_index_one_node);
           gfc_add_modify_expr (&body, count3, tmp);
         }
@@ -1629,7 +1629,7 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
       gfc_cleanup_loop (&loop1);

       /* Increment count1.  */
-      tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size));
+      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size);
       gfc_add_modify_expr (&block, count1, tmp);
       tmp = gfc_finish_block (&block);
     }
@@ -1689,7 +1689,7 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
       gfc_conv_expr (&rse, expr2);

       /* Form the expression of the temporary.  */
-      index = fold (build2 (PLUS_EXPR, gfc_array_index_type, count1, count2));
+      index = fold_build2 (PLUS_EXPR, gfc_array_index_type, count1, count2);
       lse.expr = gfc_build_array_ref (tmp1, index);
     }
@@ -1720,15 +1720,15 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
   else
     {
       /* Increment count2.  */
-      tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                          count2, gfc_index_one_node));
+      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                         count2, gfc_index_one_node);
       gfc_add_modify_expr (&body1, count2, tmp);

       /* Increment count3.  */
       if (count3)
         {
-          tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                              count3, gfc_index_one_node));
+          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                             count3, gfc_index_one_node);
           gfc_add_modify_expr (&body1, count3, tmp);
         }
@@ -1743,7 +1743,7 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
          as tree nodes in SS may not be valid in different scope.  */
     }

   /* Increment count1.  */
-  tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size));
+  tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size);
   gfc_add_modify_expr (&block, count1, tmp);
   tmp = gfc_finish_block (&block);
@@ -1800,11 +1800,11 @@ compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
       /* Figure out how many elements we need.  */
       for (i = 0; i < loop.dimen; i++)
         {
-          tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
-                              gfc_index_one_node, loop.from[i]));
-          tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                              tmp, loop.to[i]));
-          size = fold (build2 (MULT_EXPR, gfc_array_index_type, size, tmp));
+          tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
+                             gfc_index_one_node, loop.from[i]);
+          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                             tmp, loop.to[i]);
+          size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
         }
       gfc_add_block_to_block (pblock, &loop.pre);
       size = gfc_evaluate_now (size, pblock);
@@ -1868,7 +1868,7 @@ allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
   size = compute_overall_iter_number (nested_forall_info, inner_size, block);

   unit = TYPE_SIZE_UNIT (type);
-  bytesize = fold (build2 (MULT_EXPR, gfc_array_index_type, size, unit));
+  bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);

   *ptemp1 = NULL;
   temp1 = gfc_do_allocate (bytesize, size, ptemp1, block, type);
@@ -2033,8 +2033,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       gfc_add_block_to_block (&body, &rse.post);

       /* Increment count.  */
-      tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                          count, gfc_index_one_node));
+      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                         count, gfc_index_one_node);
       gfc_add_modify_expr (&body, count, tmp);

       tmp = gfc_finish_block (&body);
@@ -2078,8 +2078,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       gfc_add_modify_expr (&body, lse.expr, rse.expr);
       gfc_add_block_to_block (&body, &lse.post);

       /* Increment count.  */
-      tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                          count, gfc_index_one_node));
+      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                         count, gfc_index_one_node);
       gfc_add_modify_expr (&body, count, tmp);

       tmp = gfc_finish_block (&body);
@@ -2122,8 +2122,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       gfc_add_block_to_block (&body, &lse.post);

       /* Increment count.  */
-      tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                          count, gfc_index_one_node));
+      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                         count, gfc_index_one_node);
       gfc_add_modify_expr (&body, count, tmp);

       tmp = gfc_finish_block (&body);
@@ -2167,8 +2167,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       gfc_add_block_to_block (&body, &lse.post);

       /* Increment count.  */
-      tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                          count, gfc_index_one_node));
+      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                         count, gfc_index_one_node);
       gfc_add_modify_expr (&body, count, tmp);

       tmp = gfc_finish_block (&body);
@@ -2345,14 +2345,14 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
       lenvar = NULL_TREE;

       /* size = (end + step - start) / step.  */
-      tmp = fold (build2 (MINUS_EXPR, TREE_TYPE (start[n]),
-                          step[n], start[n]));
-      tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp));
-      tmp = fold (build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]));
+      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
+                         step[n], start[n]);
+      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
+      tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
       tmp = convert (gfc_array_index_type, tmp);
-      size = fold (build2 (MULT_EXPR, gfc_array_index_type, size, tmp));
+      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
     }

   /* Record the nvar and size of current forall level.  */
@@ -2376,8 +2376,8 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
   if (code->expr)
     {
       /* Allocate the mask temporary.  */
-      bytesize = fold (build2 (MULT_EXPR, gfc_array_index_type, size,
-                               TYPE_SIZE_UNIT (boolean_type_node)));
+      bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
+                              TYPE_SIZE_UNIT (boolean_type_node));

       mask = gfc_do_allocate (bytesize, size, &pmask, &block, boolean_type_node);
@@ -2658,8 +2658,8 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
   else
     {
       /* Increment count.  */
-      tmp1 = fold (build2 (PLUS_EXPR, gfc_array_index_type, count,
-                           gfc_index_one_node));
+      tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
+                          gfc_index_one_node);
       gfc_add_modify_expr (&body1, count, tmp1);

       /* Generate the copying loops.  */
@@ -2825,8 +2825,8 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, tree mask,
   if (lss == gfc_ss_terminator)
     {
       /* Increment count1.  */
-      tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                          count1, gfc_index_one_node));
+      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                         count1, gfc_index_one_node);
       gfc_add_modify_expr (&body, count1, tmp);

       /* Use the scalar assignment as is.  */
@@ -2841,8 +2841,8 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, tree mask,
         {
           /* Increment count1 before finish the main body of a scalarized
              expression.  */
-          tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                              count1, gfc_index_one_node));
+          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                             count1, gfc_index_one_node);
           gfc_add_modify_expr (&body, count1, tmp);
           gfc_trans_scalarized_loop_boundary (&loop, &body);
@@ -2884,15 +2884,15 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, tree mask,
           gfc_add_expr_to_block (&body, tmp);

           /* Increment count2.  */
-          tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                              count2, gfc_index_one_node));
+          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                             count2, gfc_index_one_node);
           gfc_add_modify_expr (&body, count2, tmp);
         }
       else
         {
           /* Increment count1.  */
-          tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
-                              count1, gfc_index_one_node));
+          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
+                             count1, gfc_index_one_node);
           gfc_add_modify_expr (&body, count1, tmp);
         }
...
@@ -926,8 +926,8 @@ gfc_get_dtype (tree type)
   if (size && !INTEGER_CST_P (size))
     {
       tmp = build_int_cst (gfc_array_index_type, GFC_DTYPE_SIZE_SHIFT);
-      tmp = fold (build2 (LSHIFT_EXPR, gfc_array_index_type, size, tmp));
-      dtype = fold (build2 (PLUS_EXPR, gfc_array_index_type, tmp, dtype));
+      tmp = fold_build2 (LSHIFT_EXPR, gfc_array_index_type, size, tmp);
+      dtype = fold_build2 (PLUS_EXPR, gfc_array_index_type, tmp, dtype);
     }
   /* If we don't know the size we leave it as zero. This should never happen
      for anything that is actually used.  */
@@ -1160,11 +1160,11 @@ gfc_get_array_type_bounds (tree etype, int dimen, tree * lbound,
       if (upper != NULL_TREE && lower != NULL_TREE && stride != NULL_TREE)
         {
-          tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type, upper, lower));
-          tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type, tmp,
-                              gfc_index_one_node));
-          stride =
-            fold (build2 (MULT_EXPR, gfc_array_index_type, tmp, stride));
+          tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, upper, lower);
+          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, tmp,
+                             gfc_index_one_node);
+          stride =
+            fold_build2 (MULT_EXPR, gfc_array_index_type, tmp, stride);
           /* Check the folding worked.  */
           gcc_assert (INTEGER_CST_P (stride));
         }
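Aside (not part of the commit): the folded update above, stride = (upper - lower + 1) * stride, is the usual column-major stride recurrence for a constant-shaped array, which is why the result is expected to fold down to an INTEGER_CST. A small standalone C illustration with made-up bounds:

/* Column-major strides: each dimension's stride is the previous
   stride times that dimension's extent (upper - lower + 1).  */
#include <stdio.h>

int
main (void)
{
  long lower[3] = { 2, 0, 1 };   /* hypothetical array A(2:4, 0:9, 1:3) */
  long upper[3] = { 4, 9, 3 };
  long stride = 1;
  for (int n = 0; n < 3; n++)
    {
      printf ("dim %d: stride = %ld\n", n + 1, stride);
      stride = (upper[n] - lower[n] + 1) * stride;
    }
  printf ("total elements = %ld\n", stride);   /* 3 * 10 * 3 = 90 */
  return 0;
}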
...
@@ -152,7 +152,7 @@ gfc_add_modify_expr (stmtblock_t * pblock, tree lhs, tree rhs)
              || AGGREGATE_TYPE_P (TREE_TYPE (lhs)));
 #endif

-  tmp = fold (build2_v (MODIFY_EXPR, lhs, rhs));
+  tmp = fold_build2 (MODIFY_EXPR, void_type_node, lhs, rhs);
   gfc_add_expr_to_block (pblock, tmp);
 }
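Aside (not part of the commit): every hunk above applies the same mechanical rewrite, replacing the two-step fold (buildN (...)) with the combined fold_buildN (...) helper, which produces the same folded tree. As a rough intuition for why a combined build-and-fold entry point is attractive, here is a toy model in standalone C (the toy_* names and types are made up for illustration and are not GCC's implementation): when both operands are already constants, the combined helper can return the folded constant without ever allocating the intermediate expression node.

/* Toy model only -- not GCC code.  */
#include <stdio.h>
#include <stdlib.h>

enum toy_code { TOY_CONST, TOY_PLUS };

struct toy_tree
{
  enum toy_code code;
  long value;                  /* used when code == TOY_CONST */
  struct toy_tree *op0, *op1;  /* used when code == TOY_PLUS */
};

static struct toy_tree *
toy_const (long v)
{
  struct toy_tree *t = calloc (1, sizeof *t);
  t->code = TOY_CONST;
  t->value = v;
  return t;
}

/* Analogue of build2: always allocates a new node.  */
static struct toy_tree *
toy_build2 (enum toy_code code, struct toy_tree *a, struct toy_tree *b)
{
  struct toy_tree *t = calloc (1, sizeof *t);
  t->code = code;
  t->op0 = a;
  t->op1 = b;
  return t;
}

/* Analogue of fold: simplifies a node that was already built.  */
static struct toy_tree *
toy_fold (struct toy_tree *t)
{
  if (t->code == TOY_PLUS
      && t->op0->code == TOY_CONST
      && t->op1->code == TOY_CONST)
    return toy_const (t->op0->value + t->op1->value);
  return t;
}

/* Analogue of fold_build2: folds while building, so the intermediate
   node is never created when both operands are constant.  */
static struct toy_tree *
toy_fold_build2 (enum toy_code code, struct toy_tree *a, struct toy_tree *b)
{
  if (code == TOY_PLUS && a->code == TOY_CONST && b->code == TOY_CONST)
    return toy_const (a->value + b->value);
  return toy_build2 (code, a, b);
}

int
main (void)
{
  struct toy_tree *old_way = toy_fold (toy_build2 (TOY_PLUS,
                                                   toy_const (2),
                                                   toy_const (3)));
  struct toy_tree *new_way = toy_fold_build2 (TOY_PLUS,
                                              toy_const (2),
                                              toy_const (3));
  printf ("%ld %ld\n", old_way->value, new_way->value);  /* prints "5 5" */
  return 0;
}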