Commit e38811ce by Richard Guenther Committed by Richard Biener

tree-ssa-structalias.c (find_func_aliases_for_builtin_call): New function split out from ...

2011-04-28  Richard Guenther  <rguenther@suse.de>

	* tree-ssa-structalias.c (find_func_aliases_for_builtin_call):
	New function split out from ...
	(find_func_aliases): ... here.  Call it.
	(find_func_aliases_for_call): Likewise.

From-SVN: r173060
parent a300121e
2011-04-28 Richard Guenther <rguenther@suse.de>
* tree-ssa-structalias.c (find_func_aliases_for_builtin_call):
New function split out from ...
(find_func_aliases): ... here. Call it.
(find_func_aliases_for_call): Likewise.
2011-04-27  Gabriel Dos Reis  <gdr@integrable-solutions.net>
	* internal-fn.h (internal_fn_name_array): Declare.
......
...@@ -3959,375 +3959,406 @@ get_fi_for_callee (gimple call) ...@@ -3959,375 +3959,406 @@ get_fi_for_callee (gimple call)
gcc_unreachable (); gcc_unreachable ();
} }
/* Walk statement T setting up aliasing constraints according to the /* Create constraints for the builtin call T. Return true if the call
references found in T. This function is the main part of the was handled, otherwise false. */
constraint builder. AI points to auxiliary alias information used
when building alias sets and computing alias grouping heuristics. */
static void static bool
find_func_aliases (gimple origt) find_func_aliases_for_builtin_call (gimple t)
{ {
gimple t = origt; tree fndecl = gimple_call_fndecl (t);
VEC(ce_s, heap) *lhsc = NULL; VEC(ce_s, heap) *lhsc = NULL;
VEC(ce_s, heap) *rhsc = NULL; VEC(ce_s, heap) *rhsc = NULL;
struct constraint_expr *c;
varinfo_t fi; varinfo_t fi;
/* Now build constraints expressions. */ if (fndecl != NULL_TREE
if (gimple_code (t) == GIMPLE_PHI) && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
{ /* ??? All builtins that are handled here need to be handled
size_t i; in the alias-oracle query functions explicitly! */
unsigned int j; switch (DECL_FUNCTION_CODE (fndecl))
{
/* For a phi node, assign all the arguments to /* All the following functions return a pointer to the same object
the result. */ as their first argument points to. The functions do not add
get_constraint_for (gimple_phi_result (t), &lhsc); to the ESCAPED solution. The functions make the first argument
for (i = 0; i < gimple_phi_num_args (t); i++) pointed to memory point to what the second argument pointed to
memory points to. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
case BUILT_IN_BCOPY:
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMPCPY:
case BUILT_IN_STPCPY:
case BUILT_IN_STPNCPY:
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
{ {
tree strippedrhs = PHI_ARG_DEF (t, i); tree res = gimple_call_lhs (t);
tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
STRIP_NOPS (strippedrhs); == BUILT_IN_BCOPY ? 1 : 0));
get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc); tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
== BUILT_IN_BCOPY ? 0 : 1));
FOR_EACH_VEC_ELT (ce_s, lhsc, j, c) if (res != NULL_TREE)
{ {
struct constraint_expr *c2; get_constraint_for (res, &lhsc);
while (VEC_length (ce_s, rhsc) > 0) if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
{ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
c2 = VEC_last (ce_s, rhsc); || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
process_constraint (new_constraint (*c, *c2)); get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
VEC_pop (ce_s, rhsc); else
} get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, lhsc);
VEC_free (ce_s, heap, rhsc);
} }
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
do_deref (&lhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, lhsc);
VEC_free (ce_s, heap, rhsc);
return true;
} }
} case BUILT_IN_MEMSET:
/* In IPA mode, we need to generate constraints to pass call {
arguments through their calls. There are two cases, tree res = gimple_call_lhs (t);
either a GIMPLE_CALL returning a value, or just a plain tree dest = gimple_call_arg (t, 0);
GIMPLE_CALL when we are not. unsigned i;
ce_s *lhsp;
In non-ipa mode, we need to generate constraints for each struct constraint_expr ac;
pointer passed by address. */ if (res != NULL_TREE)
else if (is_gimple_call (t))
{
tree fndecl = gimple_call_fndecl (t);
if (fndecl != NULL_TREE
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
/* ??? All builtins that are handled here need to be handled
in the alias-oracle query functions explicitly! */
switch (DECL_FUNCTION_CODE (fndecl))
{
/* All the following functions return a pointer to the same object
as their first argument points to. The functions do not add
to the ESCAPED solution. The functions make the first argument
pointed to memory point to what the second argument pointed to
memory points to. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
case BUILT_IN_BCOPY:
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMPCPY:
case BUILT_IN_STPCPY:
case BUILT_IN_STPNCPY:
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
{ {
tree res = gimple_call_lhs (t); get_constraint_for (res, &lhsc);
tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl) get_constraint_for (dest, &rhsc);
== BUILT_IN_BCOPY ? 1 : 0));
tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
== BUILT_IN_BCOPY ? 0 : 1));
if (res != NULL_TREE)
{
get_constraint_for (res, &lhsc);
if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
else
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, lhsc);
VEC_free (ce_s, heap, rhsc);
}
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
do_deref (&lhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc); process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, lhsc); VEC_free (ce_s, heap, lhsc);
VEC_free (ce_s, heap, rhsc); VEC_free (ce_s, heap, rhsc);
return;
} }
case BUILT_IN_MEMSET: get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
do_deref (&lhsc);
if (flag_delete_null_pointer_checks
&& integer_zerop (gimple_call_arg (t, 1)))
{ {
tree res = gimple_call_lhs (t); ac.type = ADDRESSOF;
tree dest = gimple_call_arg (t, 0); ac.var = nothing_id;
unsigned i;
ce_s *lhsp;
struct constraint_expr ac;
if (res != NULL_TREE)
{
get_constraint_for (res, &lhsc);
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, lhsc);
VEC_free (ce_s, heap, rhsc);
}
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
do_deref (&lhsc);
if (flag_delete_null_pointer_checks
&& integer_zerop (gimple_call_arg (t, 1)))
{
ac.type = ADDRESSOF;
ac.var = nothing_id;
}
else
{
ac.type = SCALAR;
ac.var = integer_id;
}
ac.offset = 0;
FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, ac));
VEC_free (ce_s, heap, lhsc);
return;
} }
/* All the following functions do not return pointers, do not else
modify the points-to sets of memory reachable from their
arguments and do not add to the ESCAPED solution. */
case BUILT_IN_SINCOS:
case BUILT_IN_SINCOSF:
case BUILT_IN_SINCOSL:
case BUILT_IN_FREXP:
case BUILT_IN_FREXPF:
case BUILT_IN_FREXPL:
case BUILT_IN_GAMMA_R:
case BUILT_IN_GAMMAF_R:
case BUILT_IN_GAMMAL_R:
case BUILT_IN_LGAMMA_R:
case BUILT_IN_LGAMMAF_R:
case BUILT_IN_LGAMMAL_R:
case BUILT_IN_MODF:
case BUILT_IN_MODFF:
case BUILT_IN_MODFL:
case BUILT_IN_REMQUO:
case BUILT_IN_REMQUOF:
case BUILT_IN_REMQUOL:
case BUILT_IN_FREE:
return;
/* Trampolines are special - they set up passing the static
frame. */
case BUILT_IN_INIT_TRAMPOLINE:
{ {
tree tramp = gimple_call_arg (t, 0); ac.type = SCALAR;
tree nfunc = gimple_call_arg (t, 1); ac.var = integer_id;
tree frame = gimple_call_arg (t, 2);
unsigned i;
struct constraint_expr lhs, *rhsp;
if (in_ipa_mode)
{
varinfo_t nfi = NULL;
gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
if (nfi)
{
lhs = get_function_part_constraint (nfi, fi_static_chain);
get_constraint_for (frame, &rhsc);
FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
/* Make the frame point to the function for
the trampoline adjustment call. */
get_constraint_for (tramp, &lhsc);
do_deref (&lhsc);
get_constraint_for (nfunc, &rhsc);
process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, rhsc);
VEC_free (ce_s, heap, lhsc);
return;
}
}
/* Else fallthru to generic handling which will let
the frame escape. */
break;
} }
case BUILT_IN_ADJUST_TRAMPOLINE: ac.offset = 0;
FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, ac));
VEC_free (ce_s, heap, lhsc);
return true;
}
/* All the following functions do not return pointers, do not
modify the points-to sets of memory reachable from their
arguments and do not add to the ESCAPED solution. */
case BUILT_IN_SINCOS:
case BUILT_IN_SINCOSF:
case BUILT_IN_SINCOSL:
case BUILT_IN_FREXP:
case BUILT_IN_FREXPF:
case BUILT_IN_FREXPL:
case BUILT_IN_GAMMA_R:
case BUILT_IN_GAMMAF_R:
case BUILT_IN_GAMMAL_R:
case BUILT_IN_LGAMMA_R:
case BUILT_IN_LGAMMAF_R:
case BUILT_IN_LGAMMAL_R:
case BUILT_IN_MODF:
case BUILT_IN_MODFF:
case BUILT_IN_MODFL:
case BUILT_IN_REMQUO:
case BUILT_IN_REMQUOF:
case BUILT_IN_REMQUOL:
case BUILT_IN_FREE:
return true;
/* Trampolines are special - they set up passing the static
frame. */
case BUILT_IN_INIT_TRAMPOLINE:
{
tree tramp = gimple_call_arg (t, 0);
tree nfunc = gimple_call_arg (t, 1);
tree frame = gimple_call_arg (t, 2);
unsigned i;
struct constraint_expr lhs, *rhsp;
if (in_ipa_mode)
{ {
tree tramp = gimple_call_arg (t, 0); varinfo_t nfi = NULL;
tree res = gimple_call_lhs (t); gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
if (in_ipa_mode && res) nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
if (nfi)
{ {
get_constraint_for (res, &lhsc); lhs = get_function_part_constraint (nfi, fi_static_chain);
get_constraint_for (tramp, &rhsc); get_constraint_for (frame, &rhsc);
do_deref (&rhsc); FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
/* Make the frame point to the function for
the trampoline adjustment call. */
get_constraint_for (tramp, &lhsc);
do_deref (&lhsc);
get_constraint_for (nfunc, &rhsc);
process_all_all_constraints (lhsc, rhsc); process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, rhsc); VEC_free (ce_s, heap, rhsc);
VEC_free (ce_s, heap, lhsc); VEC_free (ce_s, heap, lhsc);
return true;
} }
return;
} }
/* Variadic argument handling needs to be handled in IPA /* Else fallthru to generic handling which will let
mode as well. */ the frame escape. */
case BUILT_IN_VA_START: break;
}
case BUILT_IN_ADJUST_TRAMPOLINE:
{
tree tramp = gimple_call_arg (t, 0);
tree res = gimple_call_lhs (t);
if (in_ipa_mode && res)
{ {
if (in_ipa_mode) get_constraint_for (res, &lhsc);
{ get_constraint_for (tramp, &rhsc);
tree valist = gimple_call_arg (t, 0); do_deref (&rhsc);
struct constraint_expr rhs, *lhsp; process_all_all_constraints (lhsc, rhsc);
unsigned i; VEC_free (ce_s, heap, rhsc);
/* The va_list gets access to pointers in variadic VEC_free (ce_s, heap, lhsc);
arguments. */
fi = lookup_vi_for_tree (cfun->decl);
gcc_assert (fi != NULL);
get_constraint_for (valist, &lhsc);
do_deref (&lhsc);
rhs = get_function_part_constraint (fi, ~0);
rhs.type = ADDRESSOF;
FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
VEC_free (ce_s, heap, lhsc);
/* va_list is clobbered. */
make_constraint_to (get_call_clobber_vi (t)->id, valist);
return;
}
break;
} }
/* va_end doesn't have any effect that matters. */ return true;
case BUILT_IN_VA_END: }
return; /* Variadic argument handling needs to be handled in IPA
/* Alternate return. Simply give up for now. */ mode as well. */
case BUILT_IN_RETURN: case BUILT_IN_VA_START:
{
if (in_ipa_mode)
{ {
fi = NULL; tree valist = gimple_call_arg (t, 0);
if (!in_ipa_mode struct constraint_expr rhs, *lhsp;
|| !(fi = get_vi_for_tree (cfun->decl))) unsigned i;
make_constraint_from (get_varinfo (escaped_id), anything_id); /* The va_list gets access to pointers in variadic
else if (in_ipa_mode arguments. */
&& fi != NULL) fi = lookup_vi_for_tree (cfun->decl);
{ gcc_assert (fi != NULL);
struct constraint_expr lhs, rhs; get_constraint_for (valist, &lhsc);
lhs = get_function_part_constraint (fi, fi_result); do_deref (&lhsc);
rhs.var = anything_id; rhs = get_function_part_constraint (fi, ~0);
rhs.offset = 0; rhs.type = ADDRESSOF;
rhs.type = SCALAR; FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (lhs, rhs)); process_constraint (new_constraint (*lhsp, rhs));
} VEC_free (ce_s, heap, lhsc);
return; /* va_list is clobbered. */
make_constraint_to (get_call_clobber_vi (t)->id, valist);
return true;
} }
/* printf-style functions may have hooks to set pointers to break;
point to somewhere into the generated string. Leave them }
for a later excercise... */ /* va_end doesn't have any effect that matters. */
default: case BUILT_IN_VA_END:
/* Fallthru to general call handling. */; return true;
} /* Alternate return. Simply give up for now. */
if (!in_ipa_mode case BUILT_IN_RETURN:
|| gimple_call_internal_p (t)
|| (fndecl
&& (!(fi = lookup_vi_for_tree (fndecl))
|| !fi->is_fn_info)))
{ {
VEC(ce_s, heap) *rhsc = NULL; fi = NULL;
int flags = gimple_call_flags (t); if (!in_ipa_mode
|| !(fi = get_vi_for_tree (cfun->decl)))
/* Const functions can return their arguments and addresses make_constraint_from (get_varinfo (escaped_id), anything_id);
of global memory but not of escaped memory. */ else if (in_ipa_mode
if (flags & (ECF_CONST|ECF_NOVOPS)) && fi != NULL)
{ {
if (gimple_call_lhs (t)) struct constraint_expr lhs, rhs;
handle_const_call (t, &rhsc); lhs = get_function_part_constraint (fi, fi_result);
rhs.var = anything_id;
rhs.offset = 0;
rhs.type = SCALAR;
process_constraint (new_constraint (lhs, rhs));
} }
/* Pure functions can return addresses in and of memory return true;
reachable from their arguments, but they are not an escape }
point for reachable memory of their arguments. */ /* printf-style functions may have hooks to set pointers to
else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE)) point to somewhere into the generated string. Leave them
handle_pure_call (t, &rhsc); for a later excercise... */
else default:
handle_rhs_call (t, &rhsc); /* Fallthru to general call handling. */;
}
return false;
}
/* Create constraints for the call T.  NOTE(review): reconstructed from
   the new-revision column of a garbled two-column diff scrape; verify
   against upstream r173060 of tree-ssa-structalias.c.  */

static void
find_func_aliases_for_call (gimple t)
{
  tree fndecl = gimple_call_fndecl (t);
  VEC(ce_s, heap) *lhsc = NULL;
  VEC(ce_s, heap) *rhsc = NULL;
  varinfo_t fi;

  /* Let the builtin handler take the call if it knows it.  */
  if (fndecl != NULL_TREE
      && DECL_BUILT_IN (fndecl)
      && find_func_aliases_for_builtin_call (t))
    return;

  /* Generic (non-IPA) handling: model the call through its ECF
     flags.  Also used in IPA mode for indirect/internal calls and
     for callees without function info.  */
  if (!in_ipa_mode
      || gimple_call_internal_p (t)
      || (fndecl
	  && (!(fi = lookup_vi_for_tree (fndecl))
	      || !fi->is_fn_info)))
    {
      VEC(ce_s, heap) *rhsc = NULL;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_const_call (t, &rhsc);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_pure_call (t, &rhsc);
      else
	handle_rhs_call (t, &rhsc);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
      VEC_free (ce_s, heap, rhsc);
    }
  else
    {
      /* IPA mode with known callee info: wire arguments, result and
	 static chain directly to the callee's function parts.  */
      tree lhsop;
      unsigned j;

      fi = get_fi_for_callee (t);

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, j);

	  get_constraint_for_rhs (arg, &rhsc);
	  lhs = get_function_part_constraint (fi, fi_parm_base + j);
	  while (VEC_length (ce_s, rhsc) != 0)
	    {
	      rhsp = VEC_last (ce_s, rhsc);
	      process_constraint (new_constraint (lhs, *rhsp));
	      VEC_pop (ce_s, rhsc);
	    }
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  /* A result returned by invisible reference needs an extra
	     dereference of the result part.  */
	  if (fndecl
	      && DECL_RESULT (fndecl)
	      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	    {
	      VEC(ce_s, heap) *tem = NULL;
	      VEC_safe_push (ce_s, heap, tem, &rhs);
	      do_deref (&tem);
	      rhs = *VEC_index (ce_s, tem, 0);
	      VEC_free(ce_s, heap, tem);
	    }
	  FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	}

      /* If we pass the result decl by reference, honor that.  */
      if (lhsop
	  && fndecl
	  && DECL_RESULT (fndecl)
	  && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for_address_of (lhsop, &rhsc);
	  lhs = get_function_part_constraint (fi, fi_result);
	  FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  VEC_free (ce_s, heap, rhsc);
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
/* Walk statement T setting up aliasing constraints according to the
references found in T. This function is the main part of the
constraint builder. AI points to auxiliary alias information used
when building alias sets and computing alias grouping heuristics. */
static void
find_func_aliases (gimple origt)
{
gimple t = origt;
VEC(ce_s, heap) *lhsc = NULL;
VEC(ce_s, heap) *rhsc = NULL;
struct constraint_expr *c;
varinfo_t fi;
/* Now build constraints expressions. */
if (gimple_code (t) == GIMPLE_PHI)
{
size_t i;
unsigned int j;
/* For a phi node, assign all the arguments to
the result. */
get_constraint_for (gimple_phi_result (t), &lhsc);
for (i = 0; i < gimple_phi_num_args (t); i++)
{
tree strippedrhs = PHI_ARG_DEF (t, i);
STRIP_NOPS (strippedrhs);
get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
FOR_EACH_VEC_ELT (ce_s, lhsc, j, c)
{
struct constraint_expr *c2;
while (VEC_length (ce_s, rhsc) > 0)
{
c2 = VEC_last (ce_s, rhsc);
process_constraint (new_constraint (*c, *c2));
VEC_pop (ce_s, rhsc);
}
} }
} }
} }
/* In IPA mode, we need to generate constraints to pass call
arguments through their calls. There are two cases,
either a GIMPLE_CALL returning a value, or just a plain
GIMPLE_CALL when we are not.
In non-ipa mode, we need to generate constraints for each
pointer passed by address. */
else if (is_gimple_call (t))
find_func_aliases_for_call (t);
/* Otherwise, just a regular assignment statement. Only care about /* Otherwise, just a regular assignment statement. Only care about
operations with pointer result, others are dealt with as escape operations with pointer result, others are dealt with as escape
points if they have pointer operands. */ points if they have pointer operands. */
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment