Commit 44fc0a51 by Richard Biener Committed by Richard Biener

genmatch.c (struct expr): Add force_single_use flag.

2015-07-09  Richard Biener  <rguenther@suse.de>

	* genmatch.c (struct expr): Add force_single_use flag.
	(expr::expr): Add copy constructor.
	(capture_info::walk_match): Gather force_single_use captures.
	(expr::gen_transform): Use possibly NULLified sequence.
	(dt_simplify::gen): Apply single-use restrictions by NULLifying
	seq if any constrained expr is not single-use.
	(parser::parse_expr): Refactor to allow multiple flags.  Handle
	's' flag to force an expression to have a single use if the pattern
	simplifies to more than one statement.
	* match.pd: Convert most single_use conditionals to :s flags.

From-SVN: r225610
parent d48ca705
2015-07-09 Richard Biener <rguenther@suse.de>
* genmatch.c (struct expr): Add force_single_use flag.
(expr::expr): Add copy constructor.
(capture_info::walk_match): Gather force_single_use captures.
(expr::gen_transform): Use possibly NULLified sequence.
(dt_simplify::gen): Apply single-use restrictions by NULLifying
seq if any constrained expr is not single-use.
(parser::parse_expr): Refactor to allow multiple flags. Handle
's' flag to force an expression to have a single use if the pattern
simplifies to more than one statement.
* match.pd: Convert most single_use conditionals to :s flags.
2015-07-09 H.J. Lu <hongjiu.lu@intel.com>
* config/i386/iamcu.h (ASM_OUTPUT_ALIGNED_BSS): New.
......
......@@ -334,17 +334,15 @@ along with GCC; see the file COPYING3. If not see
/* x + (x & 1) -> (x + 1) & ~1 */
(simplify
(plus:c @0 (bit_and@2 @0 integer_onep@1))
(if (single_use (@2))
(bit_and (plus @0 @1) (bit_not @1))))
(plus:c @0 (bit_and:s @0 integer_onep@1))
(bit_and (plus @0 @1) (bit_not @1)))
/* x & ~(x & y) -> x & ~y */
/* x | ~(x | y) -> x | ~y */
(for bitop (bit_and bit_ior)
(simplify
(bitop:c @0 (bit_not (bitop:c@2 @0 @1)))
(if (single_use (@2))
(bitop @0 (bit_not @1)))))
(bitop:c @0 (bit_not (bitop:cs @0 @1)))
(bitop @0 (bit_not @1))))
/* (x | y) & ~x -> y & ~x */
/* (x & y) | ~x -> y | ~x */
......@@ -633,17 +631,14 @@ along with GCC; see the file COPYING3. If not see
/* (x & ~m) | (y & m) -> ((x ^ y) & m) ^ x */
(simplify
(bit_ior:c (bit_and:c@3 @0 (bit_not @2)) (bit_and:c@4 @1 @2))
(if (single_use (@3) && single_use (@4))
(bit_xor (bit_and (bit_xor @0 @1) @2) @0)))
(bit_ior:c (bit_and:cs @0 (bit_not @2)) (bit_and:cs @1 @2))
(bit_xor (bit_and (bit_xor @0 @1) @2) @0))
/* Associate (p +p off1) +p off2 as (p +p (off1 + off2)). */
(simplify
(pointer_plus (pointer_plus@2 @0 @1) @3)
(if (single_use (@2)
|| (TREE_CODE (@1) == INTEGER_CST && TREE_CODE (@3) == INTEGER_CST))
(pointer_plus @0 (plus @1 @3))))
(pointer_plus (pointer_plus:s @0 @1) @3)
(pointer_plus @0 (plus @1 @3)))
/* Pattern match
tem1 = (long) ptr1;
......@@ -913,7 +908,8 @@ along with GCC; see the file COPYING3. If not see
if the new mask might be further optimized. */
(for shift (lshift rshift)
(simplify
(bit_and (convert?@4 (shift@5 (convert1?@3 @0) INTEGER_CST@1)) INTEGER_CST@2)
(bit_and (convert?:s@4 (shift:s@5 (convert1?@3 @0) INTEGER_CST@1))
INTEGER_CST@2)
(if (tree_nop_conversion_p (TREE_TYPE (@4), TREE_TYPE (@5))
&& TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
&& tree_fits_uhwi_p (@1)
......@@ -993,8 +989,7 @@ along with GCC; see the file COPYING3. If not see
(with
{ tree newmaskt = build_int_cst_type (TREE_TYPE (@2), newmask); }
(if (!tree_int_cst_equal (newmaskt, @2))
(if (shift_type != TREE_TYPE (@3)
&& single_use (@4) && single_use (@5))
(if (shift_type != TREE_TYPE (@3))
(bit_and (convert (shift:shift_type (convert @3) @1)) { newmaskt; }))
(if (shift_type == TREE_TYPE (@3))
(bit_and @4 { newmaskt; }))))))))))))
......@@ -1733,7 +1728,7 @@ along with GCC; see the file COPYING3. If not see
operation and convert the result to the desired type. */
(for op (plus minus)
(simplify
(convert (op@4 (convert@2 @0) (convert@3 @1)))
(convert (op:s (convert@2 @0) (convert@3 @1)))
(if (INTEGRAL_TYPE_P (type)
/* We check for type compatibility between @0 and @1 below,
so there's no need to check that @1/@3 are integral types. */
......@@ -1750,8 +1745,7 @@ along with GCC; see the file COPYING3. If not see
/* The inner conversion must be a widening conversion. */
&& TYPE_PRECISION (TREE_TYPE (@2)) > TYPE_PRECISION (TREE_TYPE (@0))
&& types_match (@0, @1)
&& types_match (@0, type)
&& single_use (@4))
&& types_match (@0, type))
(if (TYPE_OVERFLOW_WRAPS (TREE_TYPE (@0)))
(convert (op @0 @1)))
(with { tree utype = unsigned_type_for (TREE_TYPE (@0)); }
......@@ -1764,7 +1758,7 @@ along with GCC; see the file COPYING3. If not see
arithmetic operation. */
(for op (minus plus)
(simplify
(bit_and (op@5 (convert@2 @0) (convert@3 @1)) INTEGER_CST@4)
(bit_and (op:s (convert@2 @0) (convert@3 @1)) INTEGER_CST@4)
(if (INTEGRAL_TYPE_P (type)
/* We check for type compatibility between @0 and @1 below,
so there's no need to check that @1/@3 are integral types. */
......@@ -1784,8 +1778,7 @@ along with GCC; see the file COPYING3. If not see
&& (tree_int_cst_min_precision (@4, TYPE_SIGN (TREE_TYPE (@0)))
<= TYPE_PRECISION (TREE_TYPE (@0)))
&& (TYPE_OVERFLOW_WRAPS (TREE_TYPE (@0))
|| tree_int_cst_sgn (@4) >= 0)
&& single_use (@5))
|| tree_int_cst_sgn (@4) >= 0))
(if (TYPE_OVERFLOW_WRAPS (TREE_TYPE (@0)))
(with { tree ntype = TREE_TYPE (@0); }
(convert (bit_and (op @0 @1) (convert:ntype @4)))))
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment