aboutsummaryrefslogtreecommitdiff
path: root/gcc
diff options
context:
space:
mode:
authorEric Botcazou <ebotcazou@adacore.com>2012-01-05 22:21:29 +0000
committerEric Botcazou <ebotcazou@gcc.gnu.org>2012-01-05 22:21:29 +0000
commitc3e686a350915a66b0761fa825fc162ef0010c3e (patch)
treef50f930639a48ee9746a24d75d4a228ef9bf935a /gcc
parent2d130b315dfc09b83b40d41447d5eb4a617843c6 (diff)
downloadgcc-c3e686a350915a66b0761fa825fc162ef0010c3e.zip
gcc-c3e686a350915a66b0761fa825fc162ef0010c3e.tar.gz
gcc-c3e686a350915a66b0761fa825fc162ef0010c3e.tar.bz2
re PR tree-optimization/51315 (unaligned memory accesses generated with -ftree-sra)
PR tree-optimization/51315 * tree-sra.c (tree_non_aligned_mem_for_access_p): New predicate. (build_accesses_from_assign): Use it instead of tree_non_aligned_mem_p. From-SVN: r182932
Diffstat (limited to 'gcc')
-rw-r--r--gcc/ChangeLog6
-rw-r--r--gcc/testsuite/ChangeLog5
-rw-r--r--gcc/testsuite/gcc.c-torture/execute/20120105-1.c24
-rw-r--r--gcc/tree-sra.c25
4 files changed, 56 insertions, 4 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 206e9ce..9d2648d 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2012-01-05 Eric Botcazou <ebotcazou@adacore.com>
+
+ PR tree-optimization/51315
+ * tree-sra.c (tree_non_aligned_mem_for_access_p): New predicate.
+ (build_accesses_from_assign): Use it instead of tree_non_aligned_mem_p.
+
2012-01-05 Uros Bizjak <ubizjak@gmail.com>
PR target/51681
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 3821d7c..5fdcd11 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,8 @@
+
+2012-01-05 Eric Botcazou <ebotcazou@adacore.com>
+
+	* gcc.c-torture/execute/20120105-1.c: New test.
+
2012-01-05 Paul Thomas <pault@gcc.gnu.org>
PR fortran/PR48946
diff --git a/gcc/testsuite/gcc.c-torture/execute/20120105-1.c b/gcc/testsuite/gcc.c-torture/execute/20120105-1.c
new file mode 100644
index 0000000..115ba15
--- /dev/null
+++ b/gcc/testsuite/gcc.c-torture/execute/20120105-1.c
@@ -0,0 +1,24 @@
+struct __attribute__((packed)) S
+{
+ int a, b, c;
+};
+
+static int __attribute__ ((noinline,noclone))
+extract(const char *p)
+{
+ struct S s;
+ __builtin_memcpy (&s, p, sizeof(struct S));
+ return s.a;
+}
+
+volatile int i;
+
+int main (void)
+{
+ char p[sizeof(struct S) + 1];
+
+ __builtin_memset (p, 0, sizeof(struct S) + 1);
+ i = extract (p + 1);
+
+ return 0;
+}
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index accbc1e..600f4d7 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -1095,6 +1095,25 @@ tree_non_aligned_mem_p (tree exp, unsigned int align)
return false;
}
+/* Return true if EXP is a memory reference less aligned than what the access
+ ACC would require. This is invoked only on strict-alignment targets. */
+
+static bool
+tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
+{
+ unsigned int acc_align;
+
+ /* The alignment of the access is that of its expression. However, it may
+ have been artificially increased, e.g. by a local alignment promotion,
+ so we cap it to the alignment of the type of the base, on the grounds
+ that valid sub-accesses cannot be more aligned than that. */
+ acc_align = get_object_alignment (acc->expr);
+ if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
+ acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));
+
+ return tree_non_aligned_mem_p (exp, acc_align);
+}
+
/* Scan expressions occuring in STMT, create access structures for all accesses
to candidates for scalarization and remove those candidates which occur in
statements or expressions that prevent them from being split apart. Return
@@ -1123,8 +1142,7 @@ build_accesses_from_assign (gimple stmt)
if (lacc)
{
lacc->grp_assignment_write = 1;
- if (STRICT_ALIGNMENT
- && tree_non_aligned_mem_p (rhs, get_object_alignment (lhs)))
+ if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
lacc->grp_unscalarizable_region = 1;
}
@@ -1134,8 +1152,7 @@ build_accesses_from_assign (gimple stmt)
if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg_type (racc->type))
bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
- if (STRICT_ALIGNMENT
- && tree_non_aligned_mem_p (lhs, get_object_alignment (rhs)))
+ if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
racc->grp_unscalarizable_region = 1;
}