Commit e28f2090 by Yury Gribov

Improve optimization of ASan checks.

2014-12-03  Yury Gribov  <y.gribov@samsung.com>

gcc/
	* sanopt.c (maybe_get_single_definition): New function.
	(maybe_get_dominating_check): Ditto.
	(can_remove_asan_check): Ditto.
	(struct tree_map_traits): New struct.
	(struct sanopt_ctx): Use custom traits for asan_check_map.
	(maybe_optimize_ubsan_null_ifn): Move code to
	maybe_get_dominating_check.
	(maybe_optimize_asan_check_ifn): Move code and take non-SSA expressions
	into account when optimizing.
	(sanopt_optimize_walker): Optimize ASan checks even when
	recovering.

From-SVN: r218304
parent 45392c76
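For illustration only (this example is not part of the commit or its testsuite): with -fsanitize=address every instrumented access is preceded by an ASAN_CHECK internal call. When a check is dominated by an earlier check of the same address with at least the same length, and no potentially freeing call can occur in between, sanopt can drop the later check. The patch extends the matching so that checks keyed by different SSA names are still recognized as redundant when they share the same single-definition base expression (maybe_get_single_definition) and compare equal structurally (tree_map_traits). A minimal C sketch of the pattern; whether a particular pair of checks is merged depends on how the addresses are expanded to GIMPLE:

/* Hypothetical example (not from the patch or its testsuite), assuming
   compilation with -fsanitize=address.  */

struct s
{
  int a;
  int b;
};

void
f (struct s *p)
{
  p->a = 1;  /* Instrumented: ASAN_CHECK of &p->a, typically 4 bytes.  */
  p->b = 2;  /* Different address expression; its check stays.  */
  p->a = 3;  /* Same underlying address as the first store, dominated by it,
                and no intervening call that could free *p: this ASAN_CHECK
                is a candidate for removal by the sanopt pass.  */
}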
gcc/ChangeLog:

+2014-12-03  Yury Gribov  <y.gribov@samsung.com>
+
+	* sanopt.c (maybe_get_single_definition): New function.
+	(maybe_get_dominating_check): Ditto.
+	(can_remove_asan_check): Ditto.
+	(struct tree_map_traits): New struct.
+	(struct sanopt_ctx): Use custom traits for asan_check_map.
+	(maybe_optimize_ubsan_null_ifn): Move code to
+	maybe_get_dominating_check.
+	(maybe_optimize_asan_check_ifn): Move code and take non-SSA expressions
+	into account when optimizing.
+	(sanopt_optimize_walker): Optimize ASan checks even when
+	recovering.
+
 2014-12-03  Ilya Enkovich  <ilya.enkovich@intel.com>

 	* config/i386/constraints.md (Yr): New.

gcc/sanopt.c:

@@ -84,6 +84,35 @@ struct sanopt_info
   bool visited_p;
 };
 
+/* If T has a single definition of form T = T2, return T2.  */
+
+static tree
+maybe_get_single_definition (tree t)
+{
+  if (TREE_CODE (t) == SSA_NAME)
+    {
+      gimple g = SSA_NAME_DEF_STMT (t);
+      if (gimple_assign_single_p (g))
+        return gimple_assign_rhs1 (g);
+    }
+  return NULL_TREE;
+}
+
+/* Traits class for tree hash maps below.  */
+
+struct tree_map_traits : default_hashmap_traits
+{
+  static inline hashval_t hash (const_tree ref)
+  {
+    return iterative_hash_expr (ref, 0);
+  }
+
+  static inline bool equal_keys (const_tree ref1, const_tree ref2)
+  {
+    return operand_equal_p (ref1, ref2, 0);
+  }
+};
+
 /* This is used to carry various hash maps and variables used
    in sanopt_optimize_walker.  */
@@ -95,7 +124,7 @@ struct sanopt_ctx
   /* This map maps a pointer (the second argument of ASAN_CHECK) to
      a vector of ASAN_CHECK call statements that check the access.  */
-  hash_map<tree, auto_vec<gimple> > asan_check_map;
+  hash_map<tree, auto_vec<gimple>, tree_map_traits> asan_check_map;
 
   /* Number of IFN_ASAN_CHECK statements.  */
   int asan_num_accesses;
@@ -197,6 +226,24 @@ imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
   return false;
 }
 
+/* Get the first dominating check from the list of stored checks.
+   Non-dominating checks are silently dropped.  */
+
+static gimple
+maybe_get_dominating_check (auto_vec<gimple> &v)
+{
+  for (; !v.is_empty (); v.pop ())
+    {
+      gimple g = v.last ();
+      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
+      if (!si->visited_p)
+        /* At this point we shouldn't have any statements
+           that aren't dominating the current BB.  */
+        return g;
+    }
+  return NULL;
+}
+
 /* Optimize away redundant UBSAN_NULL calls.  */
 
 static bool
@@ -209,7 +256,8 @@ maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
   bool remove = false;
 
   auto_vec<gimple> &v = ctx->null_check_map.get_or_insert (ptr);
-  if (v.is_empty ())
+  gimple g = maybe_get_dominating_check (v);
+  if (!g)
     {
       /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
         nothing to optimize yet.  */
@@ -220,90 +268,42 @@ maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
   /* We already have recorded a UBSAN_NULL check for this pointer.  Perhaps we
      can drop this one.  But only if this check doesn't specify stricter
      alignment.  */
-  while (!v.is_empty ())
-    {
-      gimple g = v.last ();
-
-      /* Remove statements for BBs that have been already processed.  */
-      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
-      if (si->visited_p)
-        {
-          v.pop ();
-          continue;
-        }
-
-      /* At this point we shouldn't have any statements that aren't dominating
-         the current BB.  */
-      tree align = gimple_call_arg (g, 2);
-      int kind = tree_to_shwi (gimple_call_arg (g, 1));
-      /* If this is a NULL pointer check where we had segv anyway, we can
-         remove it.  */
-      if (integer_zerop (align)
-          && (kind == UBSAN_LOAD_OF
-              || kind == UBSAN_STORE_OF
-              || kind == UBSAN_MEMBER_ACCESS))
-        remove = true;
-      /* Otherwise remove the check in non-recovering mode, or if the
-         stmts have same location.  */
-      else if (integer_zerop (align))
-        remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
-                 || flag_sanitize_undefined_trap_on_error
-                 || gimple_location (g) == gimple_location (stmt);
-      else if (tree_int_cst_le (cur_align, align))
-        remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
-                 || flag_sanitize_undefined_trap_on_error
-                 || gimple_location (g) == gimple_location (stmt);
-
-      if (!remove && gimple_bb (g) == gimple_bb (stmt)
-          && tree_int_cst_compare (cur_align, align) == 0)
-        v.pop ();
-      break;
-    }
+
+  tree align = gimple_call_arg (g, 2);
+  int kind = tree_to_shwi (gimple_call_arg (g, 1));
+  /* If this is a NULL pointer check where we had segv anyway, we can
+     remove it.  */
+  if (integer_zerop (align)
+      && (kind == UBSAN_LOAD_OF
+          || kind == UBSAN_STORE_OF
+          || kind == UBSAN_MEMBER_ACCESS))
+    remove = true;
+  /* Otherwise remove the check in non-recovering mode, or if the
+     stmts have same location.  */
+  else if (integer_zerop (align))
+    remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
+             || flag_sanitize_undefined_trap_on_error
+             || gimple_location (g) == gimple_location (stmt);
+  else if (tree_int_cst_le (cur_align, align))
+    remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
+             || flag_sanitize_undefined_trap_on_error
+             || gimple_location (g) == gimple_location (stmt);
+
+  if (!remove && gimple_bb (g) == gimple_bb (stmt)
+      && tree_int_cst_compare (cur_align, align) == 0)
+    v.pop ();
 
   if (!remove)
     v.safe_push (stmt);
   return remove;
 }
 
-/* Optimize away redundant ASAN_CHECK calls.  */
+/* Returns TRUE if ASan check of length LEN in block BB can be removed
+   if preceded by checks in V.  */
 
 static bool
-maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
+can_remove_asan_check (auto_vec<gimple> &v, tree len, basic_block bb)
 {
-  gcc_assert (gimple_call_num_args (stmt) == 4);
-  tree ptr = gimple_call_arg (stmt, 1);
-  tree len = gimple_call_arg (stmt, 2);
-  basic_block bb = gimple_bb (stmt);
-  sanopt_info *info = (sanopt_info *) bb->aux;
-
-  if (TREE_CODE (len) != INTEGER_CST)
-    return false;
-  if (integer_zerop (len))
-    return false;
-
-  gimple_set_uid (stmt, info->freeing_call_events);
-
-  auto_vec<gimple> &v = ctx->asan_check_map.get_or_insert (ptr);
-  if (v.is_empty ())
-    {
-      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
-         nothing to optimize yet.  */
-      v.safe_push (stmt);
-      return false;
-    }
-
-  /* We already have recorded a ASAN_CHECK check for this pointer.  Perhaps
-     we can drop this one.  But only if this check doesn't specify larger
-     size.  */
-  while (!v.is_empty ())
-    {
-      gimple g = v.last ();
-      /* Remove statements for BBs that have been already processed.  */
-      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
-      if (si->visited_p)
-        v.pop ();
-      else
-        break;
-    }
-
   unsigned int i;
   gimple g;
   gimple to_pop = NULL;
@@ -323,17 +323,9 @@ maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
           continue;
         }
 
-      if (TREE_CODE (len) != INTEGER_CST)
-        {
-          /* If there is some stmts not followed by freeing call event
-             for ptr already in the current bb, no need to insert anything.
-             Non-constant len is treated just as length 1.  */
-          if (gbb == bb)
-            return false;
-          break;
-        }
-
       tree glen = gimple_call_arg (g, 2);
+      gcc_assert (TREE_CODE (glen) == INTEGER_CST);
+
       /* If we've checked only smaller length than we want to check now,
          we can't remove the current stmt.  If g is in the same basic block,
          we want to remove it though, as the current stmt is better.  */
@@ -383,8 +375,70 @@ maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
       v.truncate (j);
     }
 
+  return remove;
+}
+
+/* Optimize away redundant ASAN_CHECK calls.  */
+
+static bool
+maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
+{
+  gcc_assert (gimple_call_num_args (stmt) == 4);
+  tree ptr = gimple_call_arg (stmt, 1);
+  tree len = gimple_call_arg (stmt, 2);
+  basic_block bb = gimple_bb (stmt);
+  sanopt_info *info = (sanopt_info *) bb->aux;
+
+  if (TREE_CODE (len) != INTEGER_CST)
+    return false;
+  if (integer_zerop (len))
+    return false;
+
+  gimple_set_uid (stmt, info->freeing_call_events);
+
+  auto_vec<gimple> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);
+
+  tree base_addr = maybe_get_single_definition (ptr);
+  auto_vec<gimple> *base_checks = NULL;
+  if (base_addr)
+    {
+      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
+      /* Original pointer might have been invalidated.  */
+      ptr_checks = ctx->asan_check_map.get (ptr);
+    }
+
+  gimple g = maybe_get_dominating_check (*ptr_checks);
+  if (!g && base_checks)
+    /* Try with base address as well.  */
+    g = maybe_get_dominating_check (*base_checks);
+
+  if (!g)
+    {
+      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
+         nothing to optimize yet.  */
+      ptr_checks->safe_push (stmt);
+      if (base_checks)
+        base_checks->safe_push (stmt);
+      return false;
+    }
+
+  bool remove = false;
+
+  if (ptr_checks)
+    remove = can_remove_asan_check (*ptr_checks, len, bb);
+
+  if (!remove && base_checks)
+    /* Try with base address as well.  */
+    remove = can_remove_asan_check (*base_checks, len, bb);
+
   if (!remove)
-    v.safe_push (stmt);
+    {
+      ptr_checks->safe_push (stmt);
+      if (base_checks)
+        base_checks->safe_push (stmt);
+    }
 
   return remove;
 }
@@ -404,10 +458,7 @@ sanopt_optimize_walker (basic_block bb, struct sanopt_ctx *ctx)
   basic_block son;
   gimple_stmt_iterator gsi;
   sanopt_info *info = (sanopt_info *) bb->aux;
-  bool asan_check_optimize
-    = (flag_sanitize & SANITIZE_ADDRESS)
-      && ((flag_sanitize & flag_sanitize_recover
-           & SANITIZE_KERNEL_ADDRESS) == 0);
+  bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;
 
   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
     {
......