Commit 8ab78162 by Nathan Sidwell

internal-fn.c (expand_UNIQUE): New.

	* internal-fn.c (expand_UNIQUE): New.
	* internal-fn.h (enum ifn_unique_kind): New.
	* internal-fn.def (IFN_UNIQUE): New.
	* target-insns.def (unique): Define.
	* gimple.h (gimple_call_internal_unique_p): New.
	* gimple.c (gimple_call_same_target_p): Check internal fn
	uniqueness.
	* tracer.c (ignore_bb_p): Check for IFN_UNIQUE call.
	* tree-ssa-threadedge.c
	(record_temporary_equivalences_from_stmts_at_dest): Likewise.
	* tree-cfg.c (gimple_call_initialize_ctrl_altering): Likewise.

From-SVN: r229459
parent a271b387
gcc/ChangeLog:

+2015-10-27  Nathan Sidwell  <nathan@codesourcery.com>
+
+	* internal-fn.c (expand_UNIQUE): New.
+	* internal-fn.h (enum ifn_unique_kind): New.
+	* internal-fn.def (IFN_UNIQUE): New.
+	* target-insns.def (unique): Define.
+	* gimple.h (gimple_call_internal_unique_p): New.
+	* gimple.c (gimple_call_same_target_p): Check internal fn
+	uniqueness.
+	* tracer.c (ignore_bb_p): Check for IFN_UNIQUE call.
+	* tree-ssa-threadedge.c
+	(record_temporary_equivalences_from_stmts_at_dest): Likewise.
+	* tree-cfg.c (gimple_call_initialize_ctrl_altering): Likewise.
+
 2015-10-27  Richard Henderson  <rth@redhat.com>
 
 	PR rtl-opt/67609

@@ -29,8 +43,10 @@
 	* graphite-optimize-isl.c (get_schedule_for_node_st): New callback
 	function to schedule based on isl_schedule_node.
-	(get_schedule_map_st): New schedule optimizer based on isl_schedule_node.
-	(scop_get_domains): New. Return the isl_union_set containing the domains of all the pbbs.
+	(get_schedule_map_st): New schedule optimizer based on
+	isl_schedule_node.
+	(scop_get_domains): New. Return the isl_union_set containing the
+	domains of all the pbbs.
 	(optimize_isl): Call the new function get_schedule_map_st for isl-0.15
 
 2015-10-27  H.J. Lu  <hongjiu.lu@intel.com>
gcc/gimple.c:

@@ -1346,7 +1346,8 @@ gimple_call_same_target_p (const gimple *c1, const gimple *c2)
 {
   if (gimple_call_internal_p (c1))
     return (gimple_call_internal_p (c2)
-            && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
+            && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
+            && !gimple_call_internal_unique_p (as_a <const gcall *> (c1)));
   else
     return (gimple_call_fn (c1) == gimple_call_fn (c2)
             || (gimple_call_fndecl (c1)
gcc/gimple.h:

@@ -2895,6 +2895,21 @@ gimple_call_internal_fn (const gimple *gs)
   return gimple_call_internal_fn (gc);
 }
 
+/* Return true, if this internal gimple call is unique.  */
+
+static inline bool
+gimple_call_internal_unique_p (const gcall *gs)
+{
+  return gimple_call_internal_fn (gs) == IFN_UNIQUE;
+}
+
+static inline bool
+gimple_call_internal_unique_p (const gimple *gs)
+{
+  const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
+  return gimple_call_internal_unique_p (gc);
+}
+
 /* If CTRL_ALTERING_P is true, mark GIMPLE_CALL S to be a stmt
    that could alter control flow.  */
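A usage note, not part of the patch: the gimple * overload above asserts that the statement really is a call, so the passes touched below always test gimple_call_internal_p first (and is_gimple_call where the statement kind is unknown). A minimal sketch of that idiom, with stmt_is_unique_marker_p as a hypothetical helper name and the usual gimple.h declarations assumed:

/* Hypothetical helper (illustration only, not in this commit):
   true if STMT is an IFN_UNIQUE internal call.  Checks that STMT is
   an internal call before applying the new predicate, mirroring the
   tracer.c and tree-cfg.c changes below.  */

static bool
stmt_is_unique_marker_p (const gimple *stmt)
{
  return (is_gimple_call (stmt)
          && gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt));
}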
gcc/internal-fn.c:

@@ -1958,6 +1958,30 @@ expand_VA_ARG (gcall *stmt ATTRIBUTE_UNUSED)
   gcc_unreachable ();
 }
 
+/* Expand the IFN_UNIQUE function according to its first argument.  */
+
+static void
+expand_UNIQUE (gcall *stmt)
+{
+  rtx pattern = NULL_RTX;
+  enum ifn_unique_kind kind
+    = (enum ifn_unique_kind) TREE_INT_CST_LOW (gimple_call_arg (stmt, 0));
+
+  switch (kind)
+    {
+    default:
+      gcc_unreachable ();
+
+    case IFN_UNIQUE_UNSPEC:
+      if (targetm.have_unique ())
+        pattern = targetm.gen_unique ();
+      break;
+    }
+
+  if (pattern)
+    emit_insn (pattern);
+}
+
 /* Routines to expand each internal function, indexed by function number.
    Each routine has the prototype:
gcc/internal-fn.def:

@@ -65,3 +65,10 @@ DEF_INTERNAL_FN (SUB_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
 DEF_INTERNAL_FN (MUL_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
 DEF_INTERNAL_FN (TSAN_FUNC_EXIT, ECF_NOVOPS | ECF_LEAF | ECF_NOTHROW, NULL)
 DEF_INTERNAL_FN (VA_ARG, ECF_NOTHROW | ECF_LEAF, NULL)
+
+/* An unduplicable, uncombinable function.  Generally used to preserve
+   a CFG property in the face of jump threading, tail merging or
+   other such optimizations.  The first argument distinguishes
+   between uses.  See internal-fn.h for usage.  */
+
+DEF_INTERNAL_FN (UNIQUE, ECF_NOTHROW, NULL)
gcc/internal-fn.h:

@@ -20,6 +20,11 @@ along with GCC; see the file COPYING3.  If not see
 #ifndef GCC_INTERNAL_FN_H
 #define GCC_INTERNAL_FN_H
 
+/* INTEGER_CST values for IFN_UNIQUE function arg-0.  */
+enum ifn_unique_kind {
+  IFN_UNIQUE_UNSPEC  /* Undifferentiated UNIQUE.  */
+};
+
 /* Initialize internal function tables.  */
 
 extern void init_internal_fns ();
gcc/target-insns.def:

@@ -89,5 +89,6 @@ DEF_TARGET_INSN (stack_protect_test, (rtx x0, rtx x1, rtx x2))
 DEF_TARGET_INSN (store_multiple, (rtx x0, rtx x1, rtx x2))
 DEF_TARGET_INSN (tablejump, (rtx x0, rtx x1))
 DEF_TARGET_INSN (trap, (void))
+DEF_TARGET_INSN (unique, (void))
 DEF_TARGET_INSN (untyped_call, (rtx x0, rtx x1, rtx x2))
 DEF_TARGET_INSN (untyped_return, (rtx x0, rtx x1))
gcc/tracer.c:

@@ -93,19 +93,26 @@ bb_seen_p (basic_block bb)
 static bool
 ignore_bb_p (const_basic_block bb)
 {
-  gimple *g;
-
   if (bb->index < NUM_FIXED_BLOCKS)
     return true;
   if (optimize_bb_for_size_p (bb))
     return true;
 
-  /* A transaction is a single entry multiple exit region.  It must be
-     duplicated in its entirety or not at all.  */
-  g = last_stmt (CONST_CAST_BB (bb));
-  if (g && gimple_code (g) == GIMPLE_TRANSACTION)
-    return true;
+  if (gimple *g = last_stmt (CONST_CAST_BB (bb)))
+    {
+      /* A transaction is a single entry multiple exit region.  It
+         must be duplicated in its entirety or not at all.  */
+      if (gimple_code (g) == GIMPLE_TRANSACTION)
+        return true;
+
+      /* An IFN_UNIQUE call must be duplicated as part of its group,
+         or not at all.  */
+      if (is_gimple_call (g)
+          && gimple_call_internal_p (g)
+          && gimple_call_internal_unique_p (g))
+        return true;
+    }
 
   return false;
 }
gcc/tree-cfg.c:

@@ -487,7 +487,11 @@ gimple_call_initialize_ctrl_altering (gimple *stmt)
       || ((flags & ECF_TM_BUILTIN)
           && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
       /* BUILT_IN_RETURN call is same as return statement.  */
-      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
+      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
+      /* IFN_UNIQUE should be the last insn, to make checking for it
+         as cheap as possible.  */
+      || (gimple_call_internal_p (stmt)
+          && gimple_call_internal_unique_p (stmt)))
     gimple_call_set_ctrl_altering (stmt, true);
   else
     gimple_call_set_ctrl_altering (stmt, false);
gcc/tree-ssa-threadedge.c:

@@ -247,6 +247,13 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
           && gimple_asm_volatile_p (as_a <gasm *> (stmt)))
         return NULL;
 
+      /* If the statement is a unique builtin, we can not thread
+         through here.  */
+      if (gimple_code (stmt) == GIMPLE_CALL
+          && gimple_call_internal_p (stmt)
+          && gimple_call_internal_unique_p (stmt))
+        return NULL;
+
       /* If duplicating this block is going to cause too much code
          expansion, then do not thread through this block.  */
       stmt_count++;
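This commit only adds the consumer side: nothing in it emits IFN_UNIQUE yet. As a rough illustration of how a producer pass might plant the marker in GIMPLE, here is a minimal sketch, assuming the hypothetical helper name emit_unique_marker and the existing gimple_build_call_internal / gsi_insert_before APIs (gimple.h, gimple-iterator.h and tree.h assumed included, as in any middle-end source file):

/* Hypothetical illustration (not part of this commit): insert an
   IFN_UNIQUE_UNSPEC marker before GSI.  Blocks containing such a
   call will no longer be duplicated by tracer.c, tail-merged via
   gimple_call_same_target_p, or threaded through by
   tree-ssa-threadedge.c, per the checks added above.  */

static void
emit_unique_marker (gimple_stmt_iterator *gsi)
{
  tree kind = build_int_cst (integer_type_node, IFN_UNIQUE_UNSPEC);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, 1, kind);
  gsi_insert_before (gsi, call, GSI_SAME_STMT);
}

At expansion time, expand_UNIQUE then emits the target's "unique" insn if the port provides one, and nothing otherwise.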