Commit 49dcd8a4 by Jakub Jelinek

re PR tree-optimization/82549 (ICE at -O1 and above: verify_gimple failed)

	PR tree-optimization/82549
	* fold-const.c (optimize_bit_field_compare, fold_truth_andor_1):
	Formatting fixes.  Instead of calling make_bit_field_ref with negative
	bitpos return 0.

	* gcc.c-torture/compile/pr82549.c: New test.

From-SVN: r253805
parent 7a76132c
gcc/ChangeLog
+2017-10-17  Jakub Jelinek  <jakub@redhat.com>
+
+        PR tree-optimization/82549
+        * fold-const.c (optimize_bit_field_compare, fold_truth_andor_1):
+        Formatting fixes.  Instead of calling make_bit_field_ref with negative
+        bitpos return 0.
+
 2017-10-17  Olga Makhotina  <olga.makhotina@intel.com>
 
         * config/i386/avx512dqintrin.h (_mm_mask_reduce_sd,
         _mm_maskz_reduce_sd, _mm_mask_reduce_ss,
 ...
gcc/fold-const.c
@@ -4013,21 +4013,20 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
                          size_int (nbitsize - lbitsize - lbitpos));
 
   if (! const_p)
-    /* If not comparing with constant, just rework the comparison
-       and return.  */
-    return fold_build2_loc (loc, code, compare_type,
-                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
-                                             make_bit_field_ref (loc, linner, lhs,
-                                                                 unsigned_type,
-                                                                 nbitsize, nbitpos,
-                                                                 1, lreversep),
-                                             mask),
-                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
-                                             make_bit_field_ref (loc, rinner, rhs,
-                                                                 unsigned_type,
-                                                                 nbitsize, nbitpos,
-                                                                 1, rreversep),
-                                             mask));
+    {
+      if (nbitpos < 0)
+        return 0;
+
+      /* If not comparing with constant, just rework the comparison
+         and return.  */
+      tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
+                                    nbitsize, nbitpos, 1, lreversep);
+      t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
+      tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
+                                    nbitsize, nbitpos, 1, rreversep);
+      t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
+      return fold_build2_loc (loc, code, compare_type, t1, t2);
+    }
 
   /* Otherwise, we are handling the constant case.  See if the constant is too
      big for the field.  Warn and return a tree for 0 (false) if so.  We do
@@ -4058,6 +4057,9 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
         }
     }
 
+  if (nbitpos < 0)
+    return 0;
+
   /* Single-bit compares should always be against zero.  */
   if (lbitsize == 1 && ! integer_zerop (rhs))
     {
@@ -5874,7 +5876,10 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
          results.  */
       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
-      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
+      if (lnbitsize == rnbitsize
+          && xll_bitpos == xlr_bitpos
+          && lnbitpos >= 0
+          && rnbitpos >= 0)
         {
           lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
                                     lntype, lnbitsize, lnbitpos,
@@ -5898,10 +5903,14 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
          Note that we still must mask the lhs/rhs expressions.  Furthermore,
          the mask must be shifted to account for the shift done by
          make_bit_field_ref.  */
-      if ((ll_bitsize + ll_bitpos == rl_bitpos
-           && lr_bitsize + lr_bitpos == rr_bitpos)
-          || (ll_bitpos == rl_bitpos + rl_bitsize
-              && lr_bitpos == rr_bitpos + rr_bitsize))
+      if (((ll_bitsize + ll_bitpos == rl_bitpos
+            && lr_bitsize + lr_bitpos == rr_bitpos)
+           || (ll_bitpos == rl_bitpos + rl_bitsize
+               && lr_bitpos == rr_bitpos + rr_bitsize))
+          && ll_bitpos >= 0
+          && rl_bitpos >= 0
+          && lr_bitpos >= 0
+          && rr_bitpos >= 0)
         {
           tree type;
 
@@ -5970,6 +5979,9 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
         }
     }
 
+  if (lnbitpos < 0)
+    return 0;
+
   /* Construct the expression we will return.  First get the component
      reference we will make.  Unless the mask is all ones the width of
      that field, perform the mask operation.  Then compare with the
 ...
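Both functions fold bit-field comparisons: optimize_bit_field_compare rewrites a single comparison involving a bit field, and fold_truth_andor_1 tries to merge two comparisons joined by && or || into one load, mask and compare of the containing word, which is why every bit position about to be handed to make_bit_field_ref now has to be non-negative. A minimal illustration of the kind of source fold_truth_andor_1 targets (a made-up example, not taken from the PR):

/* Hypothetical example, not from the PR: two adjacent bit-field tests
   joined by &&.  fold_truth_andor_1 tries to turn this into a single
   load of the word containing both fields, one mask and one compare.  */
struct s { unsigned a : 4, b : 4; };

int
both_match (struct s *p)
{
  return p->a == 3 && p->b == 5;
}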
gcc/testsuite/ChangeLog
+2017-10-17  Jakub Jelinek  <jakub@redhat.com>
+
+        PR tree-optimization/82549
+        * gcc.c-torture/compile/pr82549.c: New test.
+
 2017-10-17  Martin Liska  <mliska@suse.cz>
 
         * lib/scanasm.exp: Print how many times a regex pattern is
         found.
         * lib/scandump.exp: Likewise.
 
 2017-10-17  Olga Makhotina  <olga.makhotina@intel.com>
 
         * gcc.target/i386/avx512dq-vreducesd-1.c (_mm_mask_reduce_sd,
         _mm_maskz_reduce_sd): Test new intrinsics.
 ...

gcc/testsuite/gcc.c-torture/compile/pr82549.c (new file)
/* PR tree-optimization/82549 */
int a, b[1];
int
main ()
{
return !a || b[-2] || b[-2];
}
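The out-of-bounds reads are what drive the bit position negative: relative to the start of b, element -2 of an int array lies 64 bits before the object (assuming a 32-bit int), and before this fix that negative position reached make_bit_field_ref, producing a BIT_FIELD_REF that verify_gimple rejects at -O1 and above. A small standalone sketch of that arithmetic and of the new bail-out condition (illustration only; the names below are made up, not GCC internals):

/* Illustration only: why b[-2] yields a negative bit position, and the
   guard the patch adds before building a bit-field reference.
   Assumes a 32-bit int; none of these names are GCC internals.  */
#include <stdio.h>

int
main (void)
{
  const long bits_per_int = 32;           /* assumption: 32-bit int */
  const long index = -2;                  /* the index used in the test */
  long bitpos = index * bits_per_int;     /* -64 bits relative to b[0] */

  if (bitpos < 0)
    /* The patched folders return 0 here, i.e. decline to fold, instead
       of creating a BIT_FIELD_REF with a negative position.  */
    printf ("bitpos = %ld: give up on the bit-field fold\n", bitpos);
  else
    printf ("bitpos = %ld: safe to fold\n", bitpos);
  return 0;
}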