Commit c3e686a3 by Eric Botcazou
Committed by Eric Botcazou

re PR tree-optimization/51315 (unaligned memory accesses generated with -ftree-sra)

	PR tree-optimization/51315
	* tree-sra.c (tree_non_aligned_mem_for_access_p): New predicate.
	(build_accesses_from_assign): Use it instead of tree_non_aligned_mem_p.

From-SVN: r182932
parent 2d130b31
2012-01-05  Eric Botcazou  <ebotcazou@adacore.com>

	PR tree-optimization/51315
	* tree-sra.c (tree_non_aligned_mem_for_access_p): New predicate.
	(build_accesses_from_assign): Use it instead of tree_non_aligned_mem_p.

2012-01-05  Uros Bizjak  <ubizjak@gmail.com>

	PR target/51681

2012-01-05  Eric Botcazou  <ebotcazou@adacore.com>

	* gcc.c-torture/execute/20120104-1.c: New test.

2012-01-05  Paul Thomas  <pault@gcc.gnu.org>

	PR fortran/PR48946
......
struct __attribute__((packed)) S
{
  int a, b, c;
};

static int __attribute__ ((noinline,noclone))
extract(const char *p)
{
  struct S s;
  __builtin_memcpy (&s, p, sizeof(struct S));
  return s.a;
}

volatile int i;

int main (void)
{
  char p[sizeof(struct S) + 1];
  __builtin_memset (p, 0, sizeof(struct S) + 1);
  i = extract (p + 1);
  return 0;
}
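The call extract (p + 1) in the test hands the function an address that is only guaranteed byte alignment. As a rough, hypothetical sketch (not actual GCC output) of the failure mode the test guards against on strict-alignment targets: if the scalarized copy is rewritten into a word-sized load, it assumes alignment that p + 1 does not provide.

/* Hypothetical sketch of the problematic rewrite (not GCC output): the
   byte-wise copy has been replaced by a direct word-sized load, which
   requires 4-byte alignment that the argument p + 1 in main cannot
   guarantee, so it traps on strict-alignment targets.  */
static int
extract_scalarized (const char *p)
{
  return *(const int *) p;  /* misaligned word load */
}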
......
@@ -1095,6 +1095,25 @@ tree_non_aligned_mem_p (tree exp, unsigned int align)
   return false;
 }
 
+/* Return true if EXP is a memory reference less aligned than what the access
+   ACC would require.  This is invoked only on strict-alignment targets.  */
+
+static bool
+tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
+{
+  unsigned int acc_align;
+
+  /* The alignment of the access is that of its expression.  However, it may
+     have been artificially increased, e.g. by a local alignment promotion,
+     so we cap it to the alignment of the type of the base, on the grounds
+     that valid sub-accesses cannot be more aligned than that.  */
+  acc_align = get_object_alignment (acc->expr);
+  if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
+    acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));
+
+  return tree_non_aligned_mem_p (exp, acc_align);
+}
+
 /* Scan expressions occurring in STMT, create access structures for all accesses
    to candidates for scalarization and remove those candidates which occur in
    statements or expressions that prevent them from being split apart.  Return
......
@@ -1123,8 +1142,7 @@ build_accesses_from_assign (gimple stmt)
   if (lacc)
     {
       lacc->grp_assignment_write = 1;
-      if (STRICT_ALIGNMENT
-          && tree_non_aligned_mem_p (rhs, get_object_alignment (lhs)))
+      if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
         lacc->grp_unscalarizable_region = 1;
     }
......
@@ -1134,8 +1152,7 @@ build_accesses_from_assign (gimple stmt)
       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
           && !is_gimple_reg_type (racc->type))
         bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
-      if (STRICT_ALIGNMENT
-          && tree_non_aligned_mem_p (lhs, get_object_alignment (rhs)))
+      if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
         racc->grp_unscalarizable_region = 1;
     }
......
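A closing note on the capping step in tree_non_aligned_mem_for_access_p above: the comment's observation that valid sub-accesses cannot be more aligned than the type of the base can be seen on any packed type, as in the small hypothetical example below (illustration only, not part of the patch).

/* Hypothetical illustration, not part of the patch: a packed struct only
   guarantees byte alignment, so even if a particular object of this type
   (say, a local whose alignment was promoted) happens to sit on a word
   boundary, accesses through the type may only rely on its TYPE_ALIGN.  */
struct __attribute__((packed)) T { int x; char c; };

int
packed_type_alignment (void)
{
  return __alignof__ (struct T);  /* evaluates to 1 because of the packed attribute */
}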