| field | value | date |
|---|---|---|
| author | Matthew Wahab <matthew.wahab@arm.com> | 2015-06-29 16:09:10 +0000 |
| committer | Matthew Wahab <mwahab@gcc.gnu.org> | 2015-06-29 16:09:10 +0000 |
| commit | a96297a28065d27559d17ebe1e0eda308a05e965 (patch) | |
| tree | ba985053b0b3b9227d73b42b5e23999ab1849044 /gcc | |
| parent | 6b3a1ce9f4f8b780f5fab81cddb81f517a036532 (diff) | |
re PR target/65697 (__atomic memory barriers not strong enough for __sync builtins)
2015-06-29 Matthew Wahab <matthew.wahab@arm.com>
PR target/65697
* config/arm/arm.c (arm_split_compare_and_swap): For ARMv8, replace an
initial acquire barrier with final barrier.
From-SVN: r225133
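For context (not part of the patch): the PR concerns the __sync builtins, which GCC documents as full memory barriers, so on ARMv8 an initial load-acquire on its own is not strong enough to implement them. Below is a minimal, illustrative C example of the kind of construct that expands through arm_split_compare_and_swap with a __sync memory model; the variable names are made up for the example.

```c
/* Illustrative only: a __sync compare-and-swap of the kind PR target/65697
   concerns.  GCC documents __sync_bool_compare_and_swap as a full barrier,
   so no memory access may move across it in either direction; the patch
   restores that on ARMv8 by emitting a full barrier after the operation.  */
#include <stdio.h>

static int lock;
static int shared_data;

int
main (void)
{
  /* Full-barrier compare-and-swap: the store to shared_data below must not
     be reordered before it.  */
  if (__sync_bool_compare_and_swap (&lock, 0, 1))
    {
      shared_data = 42;
      __sync_lock_release (&lock);	/* Release-only barrier.  */
    }

  printf ("shared_data = %d\n", shared_data);
  return 0;
}
```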
Diffstat (limited to 'gcc')
| mode | path | changed |
|---|---|---|
| -rw-r--r-- | gcc/ChangeLog | 6 |
| -rw-r--r-- | gcc/config/arm/arm.c | 10 |

2 files changed, 15 insertions(+), 1 deletion(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 2a7ef9d..6ecc469 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,6 +1,12 @@
 2015-06-29  Matthew Wahab  <matthew.wahab@arm.com>
 
 	PR target/65697
+	* config/armc/arm.c (arm_split_compare_and_swap): For ARMv8, replace an
+	initial acquire barrier with final barrier.
+
+2015-06-29  Matthew Wahab  <matthew.wahab@arm.com>
+
+	PR target/65697
 	* config/armc/arm.c (arm_split_atomic_op): For ARMv8, replace an
 	initial acquire barrier with final barrier.
 
diff --git a/gcc/config/arm/arm.c b/gcc/config/arm/arm.c
index 5667763..c2dce95 100644
--- a/gcc/config/arm/arm.c
+++ b/gcc/config/arm/arm.c
@@ -27614,6 +27614,8 @@ arm_split_compare_and_swap (rtx operands[])
   scratch = operands[7];
   mode = GET_MODE (mem);
 
+  bool is_armv8_sync = arm_arch8 && is_mm_sync (mod_s);
+
   bool use_acquire = TARGET_HAVE_LDACQ
                      && !(is_mm_relaxed (mod_s) || is_mm_consume (mod_s)
                           || is_mm_release (mod_s));
@@ -27622,6 +27624,11 @@ arm_split_compare_and_swap (rtx operands[])
                      && !(is_mm_relaxed (mod_s) || is_mm_consume (mod_s)
                           || is_mm_acquire (mod_s));
 
+  /* For ARMv8, the load-acquire is too weak for __sync memory orders. Instead,
+     a full barrier is emitted after the store-release.  */
+  if (is_armv8_sync)
+    use_acquire = false;
+
   /* Checks whether a barrier is needed and emits one accordingly.  */
   if (!(use_acquire || use_release))
     arm_pre_atomic_barrier (mod_s);
@@ -27662,7 +27669,8 @@ arm_split_compare_and_swap (rtx operands[])
   emit_label (label2);
 
   /* Checks whether a barrier is needed and emits one accordingly.  */
-  if (!(use_acquire || use_release))
+  if (is_armv8_sync
+      || !(use_acquire || use_release))
     arm_post_atomic_barrier (mod_s);
 
   if (is_mm_relaxed (mod_f))
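As a rough, stand-alone sketch of the barrier-placement logic the hunks above change: for a __sync memory model on ARMv8 the acquire is dropped from the load and a single full barrier is emitted after the operation instead. The enum, the helper predicates, and plan_barriers below are hypothetical stand-ins, not GCC internals.

```c
/* Stand-alone sketch of the use_acquire/use_release decision in
   arm_split_compare_and_swap after this patch.  The enum and the helper
   names are hypothetical stand-ins, not GCC internals.  */
#include <stdbool.h>
#include <stdio.h>

enum memmodel { MM_RELAXED, MM_CONSUME, MM_ACQUIRE, MM_RELEASE,
                MM_ACQ_REL, MM_SEQ_CST, MM_SYNC_SEQ_CST };

static bool is_relaxed (enum memmodel m) { return m == MM_RELAXED; }
static bool is_consume (enum memmodel m) { return m == MM_CONSUME; }
static bool is_acquire (enum memmodel m) { return m == MM_ACQUIRE; }
static bool is_release (enum memmodel m) { return m == MM_RELEASE; }
static bool is_sync    (enum memmodel m) { return m == MM_SYNC_SEQ_CST; }

static void
plan_barriers (const char *name, bool arm_arch8, bool have_ldacq,
               enum memmodel mod_s)
{
  bool is_armv8_sync = arm_arch8 && is_sync (mod_s);

  bool use_acquire = have_ldacq
                     && !(is_relaxed (mod_s) || is_consume (mod_s)
                          || is_release (mod_s));

  bool use_release = have_ldacq
                     && !(is_relaxed (mod_s) || is_consume (mod_s)
                          || is_acquire (mod_s));

  /* The change: for an ARMv8 __sync model the load-acquire alone is too
     weak, so drop it and rely on a trailing full barrier.  */
  if (is_armv8_sync)
    use_acquire = false;

  printf ("%s:\n", name);
  printf ("  pre barrier:  %s\n", !(use_acquire || use_release) ? "dmb" : "none");
  printf ("  load:         %s\n", use_acquire ? "ldaex" : "ldrex");
  printf ("  store:        %s\n", use_release ? "stlex" : "strex");
  printf ("  post barrier: %s\n",
          (is_armv8_sync || !(use_acquire || use_release)) ? "dmb" : "none");
}

int
main (void)
{
  plan_barriers ("__atomic seq_cst", true, true, MM_SEQ_CST);
  plan_barriers ("__sync (MEMMODEL_SYNC_SEQ_CST)", true, true, MM_SYNC_SEQ_CST);
  return 0;
}
```

In this sketch the __sync case keeps the store-release, drops the acquire from the load, and reports a trailing full barrier, while the __atomic seq_cst case keeps the acquire/release pair with no extra barrier, which is the behaviour the patch aims for.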